mirror of
https://github.com/openai/codex.git
synced 2026-04-29 17:06:51 +00:00
app-server: Add back pressure and batching to command/exec (#15547)
* Add `OutgoingMessageSender::send_server_notification_to_connection_and_wait` which returns only once message is written to websocket (or failed to do so) * Use this mechanism to apply back pressure to stdout/stderr streams of processes spawned by `command/exec`, to limit them to at most one message in-memory at a time * Use back pressure signal to also batch smaller chunks into ≈64KiB ones This should make commands execution more robust over high-latency/low-throughput networks
This commit is contained in:
committed by
GitHub
parent
daf5e584c2
commit
d61c03ca08
@@ -22,6 +22,7 @@ use crate::message_processor::MessageProcessorArgs;
 use crate::outgoing_message::ConnectionId;
 use crate::outgoing_message::OutgoingEnvelope;
 use crate::outgoing_message::OutgoingMessageSender;
+use crate::outgoing_message::QueuedOutgoingMessage;
 use crate::transport::CHANNEL_CAPACITY;
 use crate::transport::ConnectionState;
 use crate::transport::OutboundConnectionState;
@@ -103,7 +104,7 @@ enum OutboundControlEvent {
     /// Register a new writer for an opened connection.
     Opened {
         connection_id: ConnectionId,
-        writer: mpsc::Sender<crate::outgoing_message::OutgoingMessage>,
+        writer: mpsc::Sender<QueuedOutgoingMessage>,
         disconnect_sender: Option<CancellationToken>,
         initialized: Arc<AtomicBool>,
         experimental_api_enabled: Arc<AtomicBool>,
|
||||
Reference in New Issue
Block a user