Update models.json (#11274)

Automated update of models.json.

---------

Co-authored-by: aibrahim-oai <219906144+aibrahim-oai@users.noreply.github.com>
Co-authored-by: Ahmed Ibrahim <aibrahim@openai.com>
Co-authored-by: Sayan Sisodiya <sayan@openai.com>
This commit was authored by github-actions[bot] on 2026-02-10 14:28:18 -08:00 and committed via GitHub.
parent 3419660767
commit 3626399811
3 changed files with 69 additions and 63 deletions

File diff suppressed because one or more lines are too long

View File

@@ -1,60 +0,0 @@
use anyhow::Result;
use codex_core::CodexAuth;
use codex_core::ThreadManager;
use codex_core::built_in_model_providers;
use codex_core::models_manager::manager::RefreshStrategy;
use core_test_support::load_default_config_for_test;
use pretty_assertions::assert_eq;
use tempfile::tempdir;
/// With API-key auth, `list_models` should return exactly the expected
/// model slugs, in order.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn list_models_returns_api_key_models() -> Result<()> {
    let home = tempdir()?;
    let config = load_default_config_for_test(&home).await;

    // Build a manager backed by the built-in OpenAI provider and a dummy API key.
    let auth = CodexAuth::from_api_key("sk-test");
    let provider = built_in_model_providers()["openai"].clone();
    let manager = ThreadManager::with_models_provider(auth, provider);

    let listed = manager
        .list_models(&config, RefreshStrategy::OnlineIfUncached)
        .await;
    let actual: Vec<String> = listed.into_iter().map(|model| model.id).collect();
    assert_eq!(expected_slugs(), actual);
    Ok(())
}
/// With ChatGPT-style auth, `list_models` should return exactly the expected
/// model slugs, in order.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn list_models_returns_chatgpt_models() -> Result<()> {
    let home = tempdir()?;
    let config = load_default_config_for_test(&home).await;

    // Same setup as the API-key test, but with dummy ChatGPT credentials.
    let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();
    let provider = built_in_model_providers()["openai"].clone();
    let manager = ThreadManager::with_models_provider(auth, provider);

    let listed = manager
        .list_models(&config, RefreshStrategy::OnlineIfUncached)
        .await;
    let actual: Vec<String> = listed.into_iter().map(|model| model.id).collect();
    assert_eq!(expected_slugs(), actual);
    Ok(())
}
/// Ordered list of model slugs both auth modes are expected to surface.
fn expected_slugs() -> Vec<String> {
    [
        "gpt-5.2-codex",
        "gpt-5.1-codex-max",
        "gpt-5.1-codex",
        "gpt-5.2",
        "gpt-5.1",
        "gpt-5-codex",
        "gpt-5",
        "gpt-5.1-codex-mini",
        "gpt-5-codex-mini",
        "bengalfox",
        "boomslang",
    ]
    .into_iter()
    .map(String::from)
    .collect()
}

View File

@@ -79,7 +79,6 @@ mod image_rollout;
mod items;
mod json_result;
mod list_dir;
mod list_models;
mod live_cli;
mod live_reload;
mod model_info_overrides;