Update models.json (#18586)

- Replace the active models-manager catalog with the deleted core
catalog contents.
- Replace stale hardcoded test model slugs with current bundled model
slugs.
- Keep this as a stacked change on top of the cleanup PR.
This commit is contained in:
Ahmed Ibrahim
2026-04-20 10:27:01 -07:00
committed by GitHub
parent 5d5d610740
commit 316cf0e90b
63 changed files with 540 additions and 1016 deletions

View File

@@ -50,7 +50,7 @@ async fn personality_does_not_mutate_base_instructions_without_template() {
         .expect("test config should allow feature update");
     config.personality = Some(Personality::Friendly);
-    let model_info = codex_core::test_support::construct_model_info_offline("gpt-5.1", &config);
+    let model_info = codex_core::test_support::construct_model_info_offline("gpt-5.4", &config);
     assert_eq!(
         model_info.get_model_instructions(config.personality),
         model_info.base_instructions
@@ -69,7 +69,7 @@ async fn base_instructions_override_disables_personality_template() {
     config.base_instructions = Some("override instructions".to_string());
     let model_info =
-        codex_core::test_support::construct_model_info_offline("gpt-5.2-codex", &config);
+        codex_core::test_support::construct_model_info_offline("gpt-5.3-codex", &config);
     assert_eq!(model_info.base_instructions, "override instructions");
     assert_eq!(
@@ -85,7 +85,7 @@ async fn user_turn_personality_none_does_not_add_update_message() -> anyhow::Res
     let server = start_mock_server().await;
     let resp_mock = mount_sse_once(&server, sse_completed("resp-1")).await;
     let mut builder = test_codex()
-        .with_model("gpt-5.2-codex")
+        .with_model("gpt-5.3-codex")
         .with_config(|config| {
             config
                 .features
@@ -135,7 +135,7 @@ async fn config_personality_some_sets_instructions_template() -> anyhow::Result<
     let server = start_mock_server().await;
     let resp_mock = mount_sse_once(&server, sse_completed("resp-1")).await;
     let mut builder = test_codex()
-        .with_model("gpt-5.2-codex")
+        .with_model("gpt-5.3-codex")
         .with_config(|config| {
             config
                 .features
@@ -193,7 +193,7 @@ async fn config_personality_none_sends_no_personality() -> anyhow::Result<()> {
     let server = start_mock_server().await;
     let resp_mock = mount_sse_once(&server, sse_completed("resp-1")).await;
     let mut builder = test_codex()
-        .with_model("gpt-5.2-codex")
+        .with_model("gpt-5.3-codex")
         .with_config(|config| {
             config
                 .features
@@ -258,7 +258,7 @@ async fn default_personality_is_pragmatic_without_config_toml() -> anyhow::Resul
     let server = start_mock_server().await;
    let resp_mock = mount_sse_once(&server, sse_completed("resp-1")).await;
     let mut builder = test_codex()
-        .with_model("gpt-5.2-codex")
+        .with_model("gpt-5.3-codex")
         .with_config(|config| {
             config
                 .features
@@ -513,7 +513,7 @@ async fn instructions_uses_base_if_feature_disabled() -> anyhow::Result<()> {
     config.personality = Some(Personality::Friendly);
     let model_info =
-        codex_core::test_support::construct_model_info_offline("gpt-5.2-codex", &config);
+        codex_core::test_support::construct_model_info_offline("gpt-5.3-codex", &config);
     assert_eq!(
         model_info.get_model_instructions(config.personality),
         model_info.base_instructions
@@ -814,7 +814,7 @@ async fn user_turn_personality_remote_model_template_includes_update_message() -
             .features
             .enable(Feature::Personality)
             .expect("test config should allow feature update");
-        config.model = Some("gpt-5.2-codex".to_string());
+        config.model = Some("gpt-5.3-codex".to_string());
     });
     let test = builder.build(&server).await?;