Mirror of https://github.com/openai/codex.git (synced 2026-02-02 23:13:37 +00:00)

Compare commits: remove/doc ... dev/jcoens (1 commit)

Commit: ce85be18be
```diff
@@ -94,14 +94,16 @@ pub struct ModelClient {
     model: String,
     client: reqwest::Client,
     provider: ModelProviderInfo,
+    reasoning_level: String,
 }
 
 impl ModelClient {
-    pub fn new(model: impl ToString, provider: ModelProviderInfo) -> Self {
+    pub fn new(model: impl ToString, provider: ModelProviderInfo, reasoning_level: String) -> Self {
         Self {
             model: model.to_string(),
             client: reqwest::Client::new(),
             provider,
+            reasoning_level,
         }
     }
```
```diff
@@ -172,7 +174,7 @@ impl ModelClient {
             tool_choice: "auto",
             parallel_tool_calls: false,
             reasoning: Some(Reasoning {
-                effort: "high",
+                effort: &self.reasoning_level,
                 generate_summary: None,
             }),
             previous_response_id: prompt.prev_id.clone(),
```
```diff
@@ -33,8 +33,8 @@ pub enum ResponseEvent {
 }
 
 #[derive(Debug, Serialize)]
-pub(crate) struct Reasoning {
-    pub(crate) effort: &'static str,
+pub(crate) struct Reasoning<'a> {
+    pub(crate) effort: &'a str,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub(crate) generate_summary: Option<bool>,
 }
```
```diff
@@ -51,7 +51,7 @@ pub(crate) struct Payload<'a> {
     pub(crate) tools: &'a [serde_json::Value],
     pub(crate) tool_choice: &'static str,
     pub(crate) parallel_tool_calls: bool,
-    pub(crate) reasoning: Option<Reasoning>,
+    pub(crate) reasoning: Option<Reasoning<'a>>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub(crate) previous_response_id: Option<String>,
     /// true when using the Responses API.
```
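The lifetime added above lets `effort` borrow the configured level rather than point at a `'static` literal. A minimal serialization sketch, assuming nothing beyond the derives and attributes shown in these hunks (the test module name is illustrative, not part of the diff):

```rust
#[cfg(test)]
mod reasoning_serialization_sketch {
    use super::Reasoning;

    #[test]
    fn borrows_effort_and_skips_unset_summary() {
        // With `generate_summary` unset, `skip_serializing_if` drops that
        // field, so only the borrowed effort string is emitted.
        let level = String::from("medium");
        let reasoning = Reasoning {
            effort: &level,
            generate_summary: None,
        };
        assert_eq!(
            serde_json::to_string(&reasoning).unwrap(),
            r#"{"effort":"medium"}"#
        );
    }
}
```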
```diff
@@ -92,6 +92,7 @@ impl Codex {
             disable_response_storage: config.disable_response_storage,
             notify: config.notify.clone(),
             cwd: config.cwd.clone(),
+            reasoning_level: config.reasoning_level.clone(),
         };
 
         tokio::spawn(submission_loop(config, rx_sub, tx_event, ctrl_c));
```
```diff
@@ -521,6 +522,7 @@ async fn submission_loop(
                 disable_response_storage,
                 notify,
                 cwd,
+                reasoning_level,
             } => {
                 info!("Configuring session: model={model}; provider={provider:?}");
                 if !cwd.is_absolute() {
```
```diff
@@ -536,7 +538,7 @@ async fn submission_loop(
                     return;
                 }
 
-                let client = ModelClient::new(model.clone(), provider.clone());
+                let client = ModelClient::new(model.clone(), provider.clone(), reasoning_level);
 
                 // abort any current running session and clone its state
                 let retain_zdr_transcript =
```
```diff
@@ -72,6 +72,9 @@ pub struct Config {
 
     /// Combined provider map (defaults merged with user-defined overrides).
     pub model_providers: HashMap<String, ModelProviderInfo>,
+
+    /// Reasoning level for the agent.
+    pub reasoning_level: String,
 }
 
 /// Base config deserialized from ~/.codex/config.toml.
```
```diff
@@ -111,6 +114,9 @@ pub struct ConfigToml {
     /// User-defined provider entries that extend/override the built-in list.
     #[serde(default)]
     pub model_providers: HashMap<String, ModelProviderInfo>,
+
+    /// Optional override of reasoning level for the agent.
+    pub reasoning_level: Option<String>,
 }
 
 impl ConfigToml {
```
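Because `ConfigToml` is the struct deserialized from `~/.codex/config.toml`, the new field corresponds to a plain `reasoning_level` key in that file. A minimal sketch, assuming `ConfigToml` derives `Deserialize`, that the crate already depends on the `toml` crate for loading this file, and that the remaining fields are optional or `#[serde(default)]` (as shown for `model_providers`); the test module name is illustrative:

```rust
#[cfg(test)]
mod reasoning_level_toml_sketch {
    use super::ConfigToml;

    #[test]
    fn reasoning_level_key_is_optional() {
        // Setting the key populates the new field...
        let cfg: ConfigToml = toml::from_str(r#"reasoning_level = "medium""#).unwrap();
        assert_eq!(cfg.reasoning_level.as_deref(), Some("medium"));

        // ...and omitting it leaves the choice to the override or the default.
        let cfg: ConfigToml = toml::from_str("").unwrap();
        assert!(cfg.reasoning_level.is_none());
    }
}
```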
```diff
@@ -171,6 +177,7 @@ pub struct ConfigOverrides {
     pub sandbox_policy: Option<SandboxPolicy>,
     pub disable_response_storage: Option<bool>,
     pub provider: Option<String>,
+    pub reasoning_level: Option<String>,
 }
 
 impl Config {
```
```diff
@@ -199,6 +206,7 @@ impl Config {
             sandbox_policy,
             disable_response_storage,
             provider,
+            reasoning_level,
         } = overrides;
 
         let sandbox_policy = match sandbox_policy {
```
```diff
@@ -263,6 +271,9 @@ impl Config {
             disable_response_storage: disable_response_storage
                 .or(cfg.disable_response_storage)
                 .unwrap_or(false),
+            reasoning_level: reasoning_level
+                .or(cfg.reasoning_level)
+                .unwrap_or_else(default_reasoning_level),
             notify: cfg.notify,
             instructions,
             mcp_servers: cfg.mcp_servers,
```
```diff
@@ -292,6 +303,10 @@ fn default_model() -> String {
     OPENAI_DEFAULT_MODEL.to_string()
 }
 
+fn default_reasoning_level() -> String {
+    "high".into()
+}
+
 /// Returns the path to the Codex configuration directory, which is `~/.codex`.
 /// Does not verify that the directory exists.
 pub fn codex_dir() -> std::io::Result<PathBuf> {
```
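Together with the merge logic a couple of hunks up, the effective level resolves as: explicit override first, then the `reasoning_level` key from `config.toml`, then the built-in `"high"` default. A self-contained sketch of that precedence (function and parameter names are illustrative, not from the diff):

```rust
// Mirrors `reasoning_level.or(cfg.reasoning_level).unwrap_or_else(default_reasoning_level)`.
fn resolve_reasoning_level(
    override_level: Option<String>,    // e.g. a CLI `--reasoning` value
    config_toml_level: Option<String>, // `reasoning_level` from ~/.codex/config.toml
) -> String {
    override_level
        .or(config_toml_level)
        .unwrap_or_else(|| "high".to_string())
}

fn main() {
    assert_eq!(resolve_reasoning_level(None, None), "high");
    assert_eq!(resolve_reasoning_level(None, Some("low".into())), "low");
    assert_eq!(
        resolve_reasoning_level(Some("medium".into()), Some("low".into())),
        "medium"
    );
}
```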
```diff
@@ -60,6 +60,9 @@ pub enum Op {
         /// `ConfigureSession` operation so that the business-logic layer can
         /// operate deterministically.
         cwd: std::path::PathBuf,
+
+        /// Reasoning level for the agent.
+        reasoning_level: String,
     },
 
     /// Abort current task.
```
```diff
@@ -39,6 +39,10 @@ pub struct Cli {
 
     /// Initial instructions for the agent.
     pub prompt: String,
+
+    /// Reasoning level for the agent.
+    #[arg(long = "reasoning", value_parser = ["low", "medium", "high"])]
+    pub reasoning_level: Option<String>,
 }
 
 #[derive(Debug, Clone, Copy, Default, PartialEq, Eq, ValueEnum)]
```
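With the flag above in place, `--reasoning low|medium|high` overrides the session's reasoning level from the command line; clap's `value_parser` rejects any other value, and leaving the flag off keeps `reasoning_level` as `None` so the config-file setting or the built-in `"high"` default applies, per the precedence sketched earlier. (The exact binary invocation is not shown in this diff.)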
```diff
@@ -32,6 +32,7 @@ pub async fn run_main(cli: Cli) -> anyhow::Result<()> {
         disable_response_storage,
         color,
         prompt,
+        reasoning_level,
     } = cli;
 
     let (stdout_with_ansi, stderr_with_ansi) = match color {
```
```diff
@@ -63,6 +64,7 @@ pub async fn run_main(cli: Cli) -> anyhow::Result<()> {
         },
         cwd: cwd.map(|p| p.canonicalize().unwrap_or(p)),
         provider: None,
+        reasoning_level,
     };
     let config = Config::load_with_overrides(overrides)?;
```
```diff
@@ -159,6 +159,7 @@ impl CodexToolCallParam {
             sandbox_policy,
             disable_response_storage,
             provider: None,
+            reasoning_level: None, // TODO: Thread this from JsonSchema
         };
 
         let cfg = codex_core::config::Config::load_with_overrides(overrides)?;
```
```diff
@@ -39,4 +39,8 @@ pub struct Cli {
     /// Disable server‑side response storage (sends the full conversation context with every request)
     #[arg(long = "disable-response-storage", default_value_t = false)]
     pub disable_response_storage: bool,
+
+    /// Configure the amount of reasoning the model should do before executing a command.
+    #[arg(long = "reasoning", value_parser = ["low", "medium", "high"])]
+    pub reasoning_level: Option<String>,
 }
```
```diff
@@ -56,6 +56,7 @@ pub fn run_main(cli: Cli) -> std::io::Result<()> {
         },
         cwd: cli.cwd.clone().map(|p| p.canonicalize().unwrap_or(p)),
         provider: None,
+        reasoning_level: cli.reasoning_level.clone(),
     };
     #[allow(clippy::print_stderr)]
     match Config::load_with_overrides(overrides) {
```