Update models.json (#18586)

- Replace the active models-manager catalog with the contents of the
deleted core catalog.
- Replace stale hardcoded test model slugs with current bundled model
slugs.
- Keep this as a stacked change on top of the cleanup PR.
This commit is contained in:
Ahmed Ibrahim
2026-04-20 10:27:01 -07:00
committed by GitHub
parent 5d5d610740
commit 316cf0e90b
63 changed files with 540 additions and 1016 deletions

View File

@@ -168,7 +168,7 @@ async fn execpolicy_blocks_shell_invocation() -> Result<()> {
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn shell_command_empty_script_with_collaboration_mode_does_not_panic() -> Result<()> {
let server = start_mock_server().await;
let mut builder = test_codex().with_model("gpt-5").with_config(|config| {
let mut builder = test_codex().with_model("gpt-5.2").with_config(|config| {
config
.features
.enable(Feature::CollaborationModes)
@@ -223,7 +223,7 @@ async fn shell_command_empty_script_with_collaboration_mode_does_not_panic() ->
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn unified_exec_empty_script_with_collaboration_mode_does_not_panic() -> Result<()> {
let server = start_mock_server().await;
let mut builder = test_codex().with_model("gpt-5").with_config(|config| {
let mut builder = test_codex().with_model("gpt-5.2").with_config(|config| {
config
.features
.enable(Feature::UnifiedExec)
@@ -282,7 +282,7 @@ async fn unified_exec_empty_script_with_collaboration_mode_does_not_panic() -> R
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn shell_command_whitespace_script_with_collaboration_mode_does_not_panic() -> Result<()> {
let server = start_mock_server().await;
let mut builder = test_codex().with_model("gpt-5").with_config(|config| {
let mut builder = test_codex().with_model("gpt-5.2").with_config(|config| {
config
.features
.enable(Feature::CollaborationModes)
@@ -337,7 +337,7 @@ async fn shell_command_whitespace_script_with_collaboration_mode_does_not_panic(
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn unified_exec_whitespace_script_with_collaboration_mode_does_not_panic() -> Result<()> {
let server = start_mock_server().await;
let mut builder = test_codex().with_model("gpt-5").with_config(|config| {
let mut builder = test_codex().with_model("gpt-5.2").with_config(|config| {
config
.features
.enable(Feature::UnifiedExec)