Compare commits

...

4 Commits

Author           SHA1        Message                                       Date
Daniel Edrisian  9229c519d2  Revert "add repeat for windows ci?"           2025-09-13 21:17:37 -07:00
                             This reverts commit 631e1eb6b7.
Daniel Edrisian  631e1eb6b7  add repeat for windows ci?                    2025-09-13 21:17:08 -07:00
Daniel Edrisian  94aa0268b2  Try attempt fix flaky windows CI              2025-09-13 20:56:16 -07:00
Daniel Edrisian  0fab7ae7a3  Don't collect pending input in review mode    2025-09-13 20:05:03 -07:00
2 changed files with 15 additions and 12 deletions

View File

@@ -1622,15 +1622,6 @@ async fn run_task(
    let mut auto_compact_recently_attempted = false;
    loop {
        // Note that pending_input would be something like a message the user
        // submitted through the UI while the model was running. Though the UI
        // may support this, the model might not.
        let pending_input = sess
            .get_pending_input()
            .into_iter()
            .map(ResponseItem::from)
            .collect::<Vec<ResponseItem>>();
        // Construct the input that we will send to the model.
        //
        // - For review threads, use the isolated in-memory history so the
@@ -1642,11 +1633,17 @@ async fn run_task(
        // only record the new items that originated in this turn so that it
        // represents an append-only log without duplicates.
        let turn_input: Vec<ResponseItem> = if is_review_mode {
            if !pending_input.is_empty() {
                review_thread_history.extend(pending_input);
            }
            review_thread_history.clone()
        } else {
            // Note that pending_input would be something like a message the user
            // submitted through the UI while the model was running. Though the UI
            // may support this, the model might not.
            let pending_input = sess
                .get_pending_input()
                .into_iter()
                .map(ResponseItem::from)
                .collect::<Vec<ResponseItem>>();
            sess.record_conversation_items(&pending_input).await;
            sess.turn_input_with_history(pending_input)
        };
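
Note on the change above: pending user input is no longer drained at the top of the loop, so a review-mode turn is built purely from review_thread_history, and the drain/record step now lives only in the non-review branch. The apparent effect is that input queued during a review turn stays pending until a normal turn picks it up. Below is a minimal, self-contained sketch of that control flow. It is not the codex-rs code: String stands in for ResponseItem, and PendingQueue is a hypothetical stand-in for the session's pending-input queue.

// Simplified model of the branch above; not the actual codex-rs types.
struct PendingQueue {
    items: Vec<String>, // stands in for Vec<ResponseItem>
}

impl PendingQueue {
    fn drain(&mut self) -> Vec<String> {
        std::mem::take(&mut self.items)
    }
}

fn turn_input(
    is_review_mode: bool,
    review_thread_history: &[String],
    session_history: &mut Vec<String>,
    pending: &mut PendingQueue,
) -> Vec<String> {
    if is_review_mode {
        // Review turns read only the isolated review history; the queue is untouched.
        review_thread_history.to_vec()
    } else {
        // Non-review turns drain the queue, record it, and build input from the full history.
        let pending_input = pending.drain();
        session_history.extend(pending_input);
        session_history.clone()
    }
}

fn main() {
    let mut pending = PendingQueue { items: vec!["queued user message".to_string()] };
    let review: Vec<String> = vec!["review seed".to_string()];
    let mut history: Vec<String> = vec!["earlier turn".to_string()];

    // During a review turn the queued message is not consumed...
    assert_eq!(turn_input(true, &review, &mut history, &mut pending), review);
    assert_eq!(pending.items.len(), 1);

    // ...and a subsequent normal turn picks it up.
    let normal = turn_input(false, &review, &mut history, &mut pending);
    assert!(normal.contains(&"queued user message".to_string()));
}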

View File

@@ -507,6 +507,8 @@ async fn start_responses_server_with_sse(sse_raw: &str, expected_requests: usize
        .respond_with(
            ResponseTemplate::new(200)
                .insert_header("content-type", "text/event-stream")
                // Ensure clients don't wait for a keepalive connection: close after body.
                .insert_header("connection", "close")
                .set_body_raw(sse.clone(), "text/event-stream"),
        )
        .expect(expected_requests as u64)
@@ -527,6 +529,10 @@ where
{
    let model_provider = ModelProviderInfo {
        base_url: Some(format!("{}/v1", server.uri())),
        // Keep the SSE stream responsive in tests: if the mock server leaves the
        // connection open after sending the final event, this short idle timeout
        // will cause the client to emit an error and let the turn complete.
        stream_idle_timeout_ms: Some(1_000),
        ..built_in_model_providers()["openai"].clone()
    };
    let mut config = load_default_config_for_test(codex_home);
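
Note on the test-helper changes: both hunks target the same flake, an SSE response the mock server never actively terminates. The first closes the connection after the body is sent; the second gives the client a 1-second idle cutoff via the provider's stream_idle_timeout_ms. Below is a minimal, self-contained sketch of the server side using the wiremock crate, which the builder chain above appears to come from (deps: tokio, wiremock, reqwest). The /v1/responses route and the SSE payload are illustrative stand-ins, not the helper's real values.

use wiremock::matchers::{method, path};
use wiremock::{Mock, MockServer, ResponseTemplate};

#[tokio::main]
async fn main() {
    // Local mock server standing in for the OpenAI Responses endpoint.
    let server = MockServer::start().await;

    // Tiny illustrative SSE body; the real helper builds this from `sse_raw`.
    let sse_body = "event: response.completed\ndata: {}\n\n".to_string();

    Mock::given(method("POST"))
        .and(path("/v1/responses"))
        .respond_with(
            ResponseTemplate::new(200)
                .insert_header("content-type", "text/event-stream")
                // As in the diff: close the socket once the body is sent so the
                // client never idles on a kept-alive connection.
                .insert_header("connection", "close")
                .set_body_raw(sse_body, "text/event-stream"),
        )
        .expect(1)
        .mount(&server)
        .await;

    // Hit the mock once, the way a provider configured with
    // base_url = format!("{}/v1", server.uri()) would.
    let body = reqwest::Client::new()
        .post(format!("{}/v1/responses", server.uri()))
        .send()
        .await
        .expect("request failed")
        .text()
        .await
        .expect("body read failed");
    assert!(body.contains("response.completed"));
}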