Mirror of https://github.com/openai/codex.git (synced 2026-02-01 22:47:52 +00:00)
chore(core) move model_instructions_template config (#9871)
## Summary

Move the `model_instructions_template` config to the experimental slug while we iterate on this feature.

## Testing

- [x] Tested locally; unit tests still pass.
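For context, here is a minimal, self-contained sketch of the data shape this change moves between model entries. It uses simplified stand-in types: the real `ModelInstructionsTemplate`, `PersonalityMessages`, and `Personality` definitions in codex-rs carry more fields and traits, and the constants used in the diff (`GPT_5_2_CODEX_INSTRUCTIONS_TEMPLATE`, `PERSONALITY_FRIENDLY`, `PERSONALITY_PRAGMATIC`) are replaced with placeholder strings.

```rust
// Stand-in sketch of the config shape moved in this commit; illustration only,
// not the codex-rs definitions.
use std::collections::BTreeMap;

#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
enum Personality {
    Friendly,
    Pragmatic,
}

// Maps a personality to the extra instructions text injected for it.
struct PersonalityMessages(BTreeMap<Personality, String>);

// A base instructions template plus optional per-personality messages.
struct ModelInstructionsTemplate {
    template: String,
    personality_messages: Option<PersonalityMessages>,
}

impl ModelInstructionsTemplate {
    // Look up the message for a personality, if one is configured.
    fn message_for(&self, personality: Personality) -> Option<&str> {
        self.personality_messages
            .as_ref()
            .and_then(|m| m.0.get(&personality))
            .map(String::as_str)
    }
}

fn main() {
    // Placeholder strings stand in for the template and personality constants.
    let template = ModelInstructionsTemplate {
        template: "<base instructions template>".to_string(),
        personality_messages: Some(PersonalityMessages(BTreeMap::from([
            (Personality::Friendly, "<friendly message>".to_string()),
            (Personality::Pragmatic, "<pragmatic message>".to_string()),
        ]))),
    };
    println!("{:?}", template.message_for(Personality::Friendly));
}
```

The diff hunks below attach exactly this kind of value to one model entry and detach it from another.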
```diff
@@ -433,7 +433,7 @@ async fn turn_start_accepts_personality_override_v2() -> Result<()> {
     let thread_req = mcp
         .send_thread_start_request(ThreadStartParams {
-            model: Some("gpt-5.2-codex".to_string()),
+            model: Some("exp-codex-personality".to_string()),
             ..Default::default()
         })
         .await?;
```
```diff
@@ -169,6 +169,16 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
         model_info!(
             slug,
             base_instructions: GPT_5_2_CODEX_INSTRUCTIONS.to_string(),
+            model_instructions_template: Some(ModelInstructionsTemplate {
+                template: GPT_5_2_CODEX_INSTRUCTIONS_TEMPLATE.to_string(),
+                personality_messages: Some(PersonalityMessages(BTreeMap::from([(
+                    Personality::Friendly,
+                    PERSONALITY_FRIENDLY.to_string(),
+                ), (
+                    Personality::Pragmatic,
+                    PERSONALITY_PRAGMATIC.to_string(),
+                )]))),
+            }),
             apply_patch_tool_type: Some(ApplyPatchToolType::Freeform),
             shell_type: ConfigShellToolType::ShellCommand,
             supports_parallel_tool_calls: true,
```
```diff
@@ -203,16 +213,6 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
             truncation_policy: TruncationPolicyConfig::tokens(10_000),
             context_window: Some(CONTEXT_WINDOW_272K),
             supported_reasoning_levels: supported_reasoning_level_low_medium_high_xhigh(),
-            model_instructions_template: Some(ModelInstructionsTemplate {
-                template: GPT_5_2_CODEX_INSTRUCTIONS_TEMPLATE.to_string(),
-                personality_messages: Some(PersonalityMessages(BTreeMap::from([(
-                    Personality::Friendly,
-                    PERSONALITY_FRIENDLY.to_string(),
-                ), (
-                    Personality::Pragmatic,
-                    PERSONALITY_PRAGMATIC.to_string(),
-                )]))),
-            }),
         )
     } else if slug.starts_with("gpt-5.1-codex-max") {
         model_info!(
```
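The two hunks above sit inside a prefix-match chain over model slugs in `find_model_info_for_slug`. A rough sketch of that selection pattern is below, under the assumption (suggested by the summary and the test changes) that after this commit only the experimental `exp-codex-personality` slug keeps the instructions template; `ModelInfo` here is a stand-in with a single illustrative field, not the real struct.

```rust
// Sketch of prefix-based slug dispatch, as suggested by the
// `slug.starts_with(...)` branches visible in the diff.
struct ModelInfo {
    has_instructions_template: bool, // stand-in for model_instructions_template
}

fn find_model_info_for_slug(slug: &str) -> ModelInfo {
    if slug.starts_with("exp-codex-personality") {
        // Assumption: after this change the template stays attached only to
        // the experimental slug while the feature is iterated on.
        ModelInfo { has_instructions_template: true }
    } else if slug.starts_with("gpt-5.2-codex") || slug.starts_with("gpt-5.1-codex-max") {
        ModelInfo { has_instructions_template: false }
    } else {
        ModelInfo { has_instructions_template: false }
    }
}

fn main() {
    assert!(find_model_info_for_slug("exp-codex-personality").has_instructions_template);
    assert!(!find_model_info_for_slug("gpt-5.2-codex").has_instructions_template);
    println!("slug dispatch sketch ok");
}
```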
```diff
@@ -122,7 +122,7 @@ async fn config_personality_some_sets_instructions_template() -> anyhow::Result<
     let server = start_mock_server().await;
     let resp_mock = mount_sse_once(&server, sse_completed("resp-1")).await;
     let mut builder = test_codex()
-        .with_model("gpt-5.2-codex")
+        .with_model("exp-codex-personality")
         .with_config(|config| {
             config.model_personality = Some(Personality::Friendly);
             config.features.disable(Feature::RemoteModels);
```
```diff
@@ -179,7 +179,7 @@ async fn user_turn_personality_some_adds_update_message() -> anyhow::Result<()>
     )
     .await;
     let mut builder = test_codex()
-        .with_model("gpt-5.2-codex")
+        .with_model("exp-codex-personality")
         .with_config(|config| {
             config.features.disable(Feature::RemoteModels);
         });
```