From c8b688655ca827e5fb4b759becc551fc17f52fed Mon Sep 17 00:00:00 2001 From: joshualitt Date: Wed, 10 Dec 2025 09:36:27 -0800 Subject: [PATCH] feat(core): Plumbing for late resolution of model configs. (#14597) --- docs/get-started/configuration.md | 5 + packages/cli/src/config/settingsSchema.ts | 10 + packages/core/src/config/config.test.ts | 24 ++ packages/core/src/config/config.ts | 18 +- .../core/src/config/defaultModelConfigs.ts | 10 + .../src/services/modelConfig.golden.test.ts | 53 +++++ .../src/services/modelConfigService.test.ts | 118 ++++++++++ .../core/src/services/modelConfigService.ts | 17 +- .../resolved-aliases-retry.golden.json | 222 ++++++++++++++++++ schemas/settings.schema.json | 25 +- 10 files changed, 493 insertions(+), 9 deletions(-) create mode 100644 packages/core/src/services/test-data/resolved-aliases-retry.golden.json diff --git a/docs/get-started/configuration.md b/docs/get-started/configuration.md index 71167e818a..ff4160128b 100644 --- a/docs/get-started/configuration.md +++ b/docs/get-started/configuration.md @@ -524,6 +524,11 @@ their corresponding top-level category object in your `settings.json` file. with (and override) the built-in aliases. - **Default:** `{}` +- **`modelConfigs.customOverrides`** (array): + - **Description:** Custom model config overrides. These are merged with (and + added to) the built-in overrides. + - **Default:** `[]` + - **`modelConfigs.overrides`** (array): - **Description:** Apply specific configuration overrides based on matches, with a primary key of model (or alias). The most specific match will be diff --git a/packages/cli/src/config/settingsSchema.ts b/packages/cli/src/config/settingsSchema.ts index 5efca6f20f..e35da0fdc1 100644 --- a/packages/cli/src/config/settingsSchema.ts +++ b/packages/cli/src/config/settingsSchema.ts @@ -742,6 +742,16 @@ const SETTINGS_SCHEMA = { 'Custom named presets for model configs. These are merged with (and override) the built-in aliases.', showInDialog: false, }, + customOverrides: { + type: 'array', + label: 'Custom Model Config Overrides', + category: 'Model', + requiresRestart: false, + default: [], + description: + 'Custom model config overrides. 
These are merged with (and added to) the built-in overrides.', + showInDialog: false, + }, overrides: { type: 'array', label: 'Model Config Overrides', diff --git a/packages/core/src/config/config.test.ts b/packages/core/src/config/config.test.ts index 90247b4441..554cee04bf 100644 --- a/packages/core/src/config/config.test.ts +++ b/packages/core/src/config/config.test.ts @@ -1344,6 +1344,30 @@ describe('Generation Config Merging (HACK)', () => { expect(serviceConfig.overrides).toEqual(userOverrides); }); + it('should merge default overrides when user provides only aliases', () => { + const userAliases = { + 'my-alias': { + modelConfig: { model: 'my-model' }, + }, + }; + + const params: ConfigParameters = { + ...baseParams, + modelConfigServiceConfig: { + aliases: userAliases, + }, + }; + + const config = new Config(params); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const serviceConfig = (config.modelConfigService as any).config; + + // Assert that the user's aliases are present + expect(serviceConfig.aliases).toEqual(userAliases); + // Assert that the default overrides are present + expect(serviceConfig.overrides).toEqual(DEFAULT_MODEL_CONFIGS.overrides); + }); + it('should use user-provided aliases if they exist', () => { const userAliases = { 'my-alias': { diff --git a/packages/core/src/config/config.ts b/packages/core/src/config/config.ts index 678af20cd0..7b033b5037 100644 --- a/packages/core/src/config/config.ts +++ b/packages/core/src/config/config.ts @@ -629,11 +629,19 @@ export class Config { // TODO(12593): Fix the settings loading logic to properly merge defaults and // remove this hack. let modelConfigServiceConfig = params.modelConfigServiceConfig; - if (modelConfigServiceConfig && !modelConfigServiceConfig.aliases) { - modelConfigServiceConfig = { - ...modelConfigServiceConfig, - aliases: DEFAULT_MODEL_CONFIGS.aliases, - }; + if (modelConfigServiceConfig) { + if (!modelConfigServiceConfig.aliases) { + modelConfigServiceConfig = { + ...modelConfigServiceConfig, + aliases: DEFAULT_MODEL_CONFIGS.aliases, + }; + } + if (!modelConfigServiceConfig.overrides) { + modelConfigServiceConfig = { + ...modelConfigServiceConfig, + overrides: DEFAULT_MODEL_CONFIGS.overrides, + }; + } } this.modelConfigService = new ModelConfigService( diff --git a/packages/core/src/config/defaultModelConfigs.ts b/packages/core/src/config/defaultModelConfigs.ts index 69aa2effcb..dd228ac4e6 100644 --- a/packages/core/src/config/defaultModelConfigs.ts +++ b/packages/core/src/config/defaultModelConfigs.ts @@ -209,4 +209,14 @@ export const DEFAULT_MODEL_CONFIGS: ModelConfigServiceConfig = { }, }, }, + overrides: [ + { + match: { model: 'chat-base', isRetry: true }, + modelConfig: { + generateContentConfig: { + temperature: 1, + }, + }, + }, + ], }; diff --git a/packages/core/src/services/modelConfig.golden.test.ts b/packages/core/src/services/modelConfig.golden.test.ts index c11f763306..3b490a6dac 100644 --- a/packages/core/src/services/modelConfig.golden.test.ts +++ b/packages/core/src/services/modelConfig.golden.test.ts @@ -18,6 +18,14 @@ const GOLDEN_FILE_PATH = path.resolve( 'resolved-aliases.golden.json', ); +const RETRY_GOLDEN_FILE_PATH = path.resolve( + process.cwd(), + 'src', + 'services', + 'test-data', + 'resolved-aliases-retry.golden.json', +); + describe('ModelConfigService Golden Test', () => { it('should match the golden file for resolved default aliases', async () => { const service = new ModelConfigService(DEFAULT_MODEL_CONFIGS); @@ -60,4 +68,49 @@ 
describe('ModelConfigService Golden Test', () => { 'Golden file mismatch. If the new resolved aliases are correct, run the test with `UPDATE_GOLDENS=true` to regenerate the golden file.', ).toEqual(goldenData); }); + + it('should match the golden file for resolved default aliases with isRetry=true', async () => { + const service = new ModelConfigService(DEFAULT_MODEL_CONFIGS); + const aliases = Object.keys(DEFAULT_MODEL_CONFIGS.aliases ?? {}); + + const resolvedAliases: Record = {}; + for (const alias of aliases) { + resolvedAliases[alias] = + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (service as any).internalGetResolvedConfig({ + model: alias, + isRetry: true, + }); + } + + if (process.env['UPDATE_GOLDENS']) { + await fs.mkdir(path.dirname(RETRY_GOLDEN_FILE_PATH), { recursive: true }); + await fs.writeFile( + RETRY_GOLDEN_FILE_PATH, + JSON.stringify(resolvedAliases, null, 2), + 'utf-8', + ); + // In update mode, we pass the test after writing the file. + return; + } + + let goldenContent: string; + try { + goldenContent = await fs.readFile(RETRY_GOLDEN_FILE_PATH, 'utf-8'); + } catch (e) { + if ((e as NodeJS.ErrnoException).code === 'ENOENT') { + throw new Error( + 'Golden file not found. Run with `UPDATE_GOLDENS=true` to create it.', + ); + } + throw e; + } + + const goldenData = JSON.parse(goldenContent); + + expect( + resolvedAliases, + 'Golden file mismatch. If the new resolved aliases are correct, run the test with `UPDATE_GOLDENS=true` to regenerate the golden file.', + ).toEqual(goldenData); + }); }); diff --git a/packages/core/src/services/modelConfigService.test.ts b/packages/core/src/services/modelConfigService.test.ts index 0ec08c4535..8d08e4f775 100644 --- a/packages/core/src/services/modelConfigService.test.ts +++ b/packages/core/src/services/modelConfigService.test.ts @@ -697,4 +697,122 @@ describe('ModelConfigService', () => { }); }); }); + + describe('custom overrides', () => { + it('should apply custom overrides on top of defaults', () => { + const config: ModelConfigServiceConfig = { + aliases: { + 'test-alias': { + modelConfig: { + model: 'gemini-test', + generateContentConfig: { temperature: 0.5 }, + }, + }, + }, + overrides: [ + { + match: { model: 'test-alias' }, + modelConfig: { generateContentConfig: { temperature: 0.6 } }, + }, + ], + customOverrides: [ + { + match: { model: 'test-alias' }, + modelConfig: { generateContentConfig: { temperature: 0.7 } }, + }, + ], + }; + const service = new ModelConfigService(config); + const resolved = service.getResolvedConfig({ model: 'test-alias' }); + + // Custom overrides should be appended to overrides, so they win + expect(resolved.generateContentConfig.temperature).toBe(0.7); + }); + }); + + describe('retry behavior', () => { + it('should apply retry-specific overrides when isRetry is true', () => { + const config: ModelConfigServiceConfig = { + aliases: { + 'test-model': { + modelConfig: { + model: 'gemini-test', + generateContentConfig: { + temperature: 0.5, + }, + }, + }, + }, + overrides: [ + { + match: { model: 'test-model', isRetry: true }, + modelConfig: { + generateContentConfig: { + temperature: 1.0, + }, + }, + }, + ], + }; + const service = new ModelConfigService(config); + + // Normal request + const normal = service.getResolvedConfig({ model: 'test-model' }); + expect(normal.generateContentConfig.temperature).toBe(0.5); + + // Retry request + const retry = service.getResolvedConfig({ + model: 'test-model', + isRetry: true, + }); + expect(retry.generateContentConfig.temperature).toBe(1.0); 
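+      // The higher temperature comes from the override matching
+      // { model: 'test-model', isRetry: true }, which applies only on retries.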
+ }); + + it('should prioritize retry overrides over generic overrides', () => { + const config: ModelConfigServiceConfig = { + aliases: { + 'test-model': { + modelConfig: { + model: 'gemini-test', + generateContentConfig: { + temperature: 0.5, + }, + }, + }, + }, + overrides: [ + // Generic override for this model + { + match: { model: 'test-model' }, + modelConfig: { + generateContentConfig: { + temperature: 0.7, + }, + }, + }, + // Retry-specific override + { + match: { model: 'test-model', isRetry: true }, + modelConfig: { + generateContentConfig: { + temperature: 1.0, + }, + }, + }, + ], + }; + const service = new ModelConfigService(config); + + // Normal request - hits generic override + const normal = service.getResolvedConfig({ model: 'test-model' }); + expect(normal.generateContentConfig.temperature).toBe(0.7); + + // Retry request - hits retry override (more specific) + const retry = service.getResolvedConfig({ + model: 'test-model', + isRetry: true, + }); + expect(retry.generateContentConfig.temperature).toBe(1.0); + }); + }); }); diff --git a/packages/core/src/services/modelConfigService.ts b/packages/core/src/services/modelConfigService.ts index 9c7a5f5a02..0a642b7e61 100644 --- a/packages/core/src/services/modelConfigService.ts +++ b/packages/core/src/services/modelConfigService.ts @@ -21,6 +21,11 @@ export interface ModelConfigKey { // model calls made by this specific subagent, and no others, while still // ensuring model configs are fully orthogonal to the agents who use them. overrideScope?: string; + + // Indicates whether this configuration request is happening during a retry attempt. + // This allows overrides to specify different settings (e.g., higher temperature) + // specifically for retry scenarios. + isRetry?: boolean; } export interface ModelConfig { @@ -32,6 +37,7 @@ export interface ModelConfigOverride { match: { model?: string; // Can be a model name or an alias overrideScope?: string; + isRetry?: boolean; }; modelConfig: ModelConfig; } @@ -45,6 +51,7 @@ export interface ModelConfigServiceConfig { aliases?: Record; customAliases?: Record; overrides?: ModelConfigOverride[]; + customOverrides?: ModelConfigOverride[]; } export type ResolvedModelConfig = _ResolvedModelConfig & { @@ -105,12 +112,18 @@ export class ModelConfigService { generateContentConfig: GenerateContentConfig; } { const config = this.config || {}; - const { aliases = {}, customAliases = {}, overrides = [] } = config; + const { + aliases = {}, + customAliases = {}, + overrides = [], + customOverrides = [], + } = config; const allAliases = { ...aliases, ...customAliases, ...this.runtimeAliases, }; + const allOverrides = [...overrides, ...customOverrides]; let baseModel: string | undefined = context.model; let resolvedConfig: GenerateContentConfig = {}; @@ -135,7 +148,7 @@ export class ModelConfigService { }; // Step 2: Override Application - const matches = overrides + const matches = allOverrides .map((override, index) => { const matchEntries = Object.entries(override.match); if (matchEntries.length === 0) { diff --git a/packages/core/src/services/test-data/resolved-aliases-retry.golden.json b/packages/core/src/services/test-data/resolved-aliases-retry.golden.json new file mode 100644 index 0000000000..451e50c5cd --- /dev/null +++ b/packages/core/src/services/test-data/resolved-aliases-retry.golden.json @@ -0,0 +1,222 @@ +{ + "base": { + "generateContentConfig": { + "temperature": 0, + "topP": 1 + } + }, + "chat-base": { + "generateContentConfig": { + "temperature": 1, + "topP": 0.95, + 
"thinkingConfig": { + "includeThoughts": true + }, + "topK": 64 + } + }, + "chat-base-2.5": { + "generateContentConfig": { + "temperature": 1, + "topP": 0.95, + "thinkingConfig": { + "includeThoughts": true, + "thinkingBudget": 8192 + }, + "topK": 64 + } + }, + "chat-base-3": { + "generateContentConfig": { + "temperature": 1, + "topP": 0.95, + "thinkingConfig": { + "includeThoughts": true, + "thinkingLevel": "HIGH" + }, + "topK": 64 + } + }, + "gemini-3-pro-preview": { + "model": "gemini-3-pro-preview", + "generateContentConfig": { + "temperature": 1, + "topP": 0.95, + "thinkingConfig": { + "includeThoughts": true, + "thinkingLevel": "HIGH" + }, + "topK": 64 + } + }, + "gemini-2.5-pro": { + "model": "gemini-2.5-pro", + "generateContentConfig": { + "temperature": 1, + "topP": 0.95, + "thinkingConfig": { + "includeThoughts": true, + "thinkingBudget": 8192 + }, + "topK": 64 + } + }, + "gemini-2.5-flash": { + "model": "gemini-2.5-flash", + "generateContentConfig": { + "temperature": 1, + "topP": 0.95, + "thinkingConfig": { + "includeThoughts": true, + "thinkingBudget": 8192 + }, + "topK": 64 + } + }, + "gemini-2.5-flash-lite": { + "model": "gemini-2.5-flash-lite", + "generateContentConfig": { + "temperature": 1, + "topP": 0.95, + "thinkingConfig": { + "includeThoughts": true, + "thinkingBudget": 8192 + }, + "topK": 64 + } + }, + "gemini-2.5-flash-base": { + "model": "gemini-2.5-flash", + "generateContentConfig": { + "temperature": 0, + "topP": 1 + } + }, + "classifier": { + "model": "gemini-2.5-flash-lite", + "generateContentConfig": { + "temperature": 0, + "topP": 1, + "maxOutputTokens": 1024, + "thinkingConfig": { + "thinkingBudget": 512 + } + } + }, + "prompt-completion": { + "model": "gemini-2.5-flash-lite", + "generateContentConfig": { + "temperature": 0.3, + "topP": 1, + "maxOutputTokens": 16000, + "thinkingConfig": { + "thinkingBudget": 0 + } + } + }, + "edit-corrector": { + "model": "gemini-2.5-flash-lite", + "generateContentConfig": { + "temperature": 0, + "topP": 1, + "thinkingConfig": { + "thinkingBudget": 0 + } + } + }, + "summarizer-default": { + "model": "gemini-2.5-flash-lite", + "generateContentConfig": { + "temperature": 0, + "topP": 1, + "maxOutputTokens": 2000 + } + }, + "summarizer-shell": { + "model": "gemini-2.5-flash-lite", + "generateContentConfig": { + "temperature": 0, + "topP": 1, + "maxOutputTokens": 2000 + } + }, + "web-search": { + "model": "gemini-2.5-flash", + "generateContentConfig": { + "temperature": 0, + "topP": 1, + "tools": [ + { + "googleSearch": {} + } + ] + } + }, + "web-fetch": { + "model": "gemini-2.5-flash", + "generateContentConfig": { + "temperature": 0, + "topP": 1, + "tools": [ + { + "urlContext": {} + } + ] + } + }, + "web-fetch-fallback": { + "model": "gemini-2.5-flash", + "generateContentConfig": { + "temperature": 0, + "topP": 1 + } + }, + "loop-detection": { + "model": "gemini-2.5-flash", + "generateContentConfig": { + "temperature": 0, + "topP": 1 + } + }, + "loop-detection-double-check": { + "model": "gemini-2.5-pro", + "generateContentConfig": { + "temperature": 0, + "topP": 1 + } + }, + "llm-edit-fixer": { + "model": "gemini-2.5-flash", + "generateContentConfig": { + "temperature": 0, + "topP": 1 + } + }, + "next-speaker-checker": { + "model": "gemini-2.5-flash", + "generateContentConfig": { + "temperature": 0, + "topP": 1 + } + }, + "chat-compression-3-pro": { + "model": "gemini-3-pro-preview", + "generateContentConfig": {} + }, + "chat-compression-2.5-pro": { + "model": "gemini-2.5-pro", + "generateContentConfig": {} + }, + 
"chat-compression-2.5-flash": { + "model": "gemini-2.5-flash", + "generateContentConfig": {} + }, + "chat-compression-2.5-flash-lite": { + "model": "gemini-2.5-flash-lite", + "generateContentConfig": {} + }, + "chat-compression-default": { + "model": "gemini-2.5-pro", + "generateContentConfig": {} + } +} diff --git a/schemas/settings.schema.json b/schemas/settings.schema.json index 9b38000e5d..f1428fcbd4 100644 --- a/schemas/settings.schema.json +++ b/schemas/settings.schema.json @@ -442,7 +442,7 @@ "modelConfigs": { "title": "Model Configs", "description": "Model configurations.", - "markdownDescription": "Model configurations.\n\n- Category: `Model`\n- Requires restart: `no`\n- Default: `{\n \"aliases\": {\n \"base\": {\n \"modelConfig\": {\n \"generateContentConfig\": {\n \"temperature\": 0,\n \"topP\": 1\n }\n }\n },\n \"chat-base\": {\n \"extends\": \"base\",\n \"modelConfig\": {\n \"generateContentConfig\": {\n \"thinkingConfig\": {\n \"includeThoughts\": true\n },\n \"temperature\": 1,\n \"topP\": 0.95,\n \"topK\": 64\n }\n }\n },\n \"chat-base-2.5\": {\n \"extends\": \"chat-base\",\n \"modelConfig\": {\n \"generateContentConfig\": {\n \"thinkingConfig\": {\n \"thinkingBudget\": 8192\n }\n }\n }\n },\n \"chat-base-3\": {\n \"extends\": \"chat-base\",\n \"modelConfig\": {\n \"generateContentConfig\": {\n \"thinkingConfig\": {\n \"thinkingLevel\": \"HIGH\"\n }\n }\n }\n },\n \"gemini-3-pro-preview\": {\n \"extends\": \"chat-base-3\",\n \"modelConfig\": {\n \"model\": \"gemini-3-pro-preview\"\n }\n },\n \"gemini-2.5-pro\": {\n \"extends\": \"chat-base-2.5\",\n \"modelConfig\": {\n \"model\": \"gemini-2.5-pro\"\n }\n },\n \"gemini-2.5-flash\": {\n \"extends\": \"chat-base-2.5\",\n \"modelConfig\": {\n \"model\": \"gemini-2.5-flash\"\n }\n },\n \"gemini-2.5-flash-lite\": {\n \"extends\": \"chat-base-2.5\",\n \"modelConfig\": {\n \"model\": \"gemini-2.5-flash-lite\"\n }\n },\n \"gemini-2.5-flash-base\": {\n \"extends\": \"base\",\n \"modelConfig\": {\n \"model\": \"gemini-2.5-flash\"\n }\n },\n \"classifier\": {\n \"extends\": \"base\",\n \"modelConfig\": {\n \"model\": \"gemini-2.5-flash-lite\",\n \"generateContentConfig\": {\n \"maxOutputTokens\": 1024,\n \"thinkingConfig\": {\n \"thinkingBudget\": 512\n }\n }\n }\n },\n \"prompt-completion\": {\n \"extends\": \"base\",\n \"modelConfig\": {\n \"model\": \"gemini-2.5-flash-lite\",\n \"generateContentConfig\": {\n \"temperature\": 0.3,\n \"maxOutputTokens\": 16000,\n \"thinkingConfig\": {\n \"thinkingBudget\": 0\n }\n }\n }\n },\n \"edit-corrector\": {\n \"extends\": \"base\",\n \"modelConfig\": {\n \"model\": \"gemini-2.5-flash-lite\",\n \"generateContentConfig\": {\n \"thinkingConfig\": {\n \"thinkingBudget\": 0\n }\n }\n }\n },\n \"summarizer-default\": {\n \"extends\": \"base\",\n \"modelConfig\": {\n \"model\": \"gemini-2.5-flash-lite\",\n \"generateContentConfig\": {\n \"maxOutputTokens\": 2000\n }\n }\n },\n \"summarizer-shell\": {\n \"extends\": \"base\",\n \"modelConfig\": {\n \"model\": \"gemini-2.5-flash-lite\",\n \"generateContentConfig\": {\n \"maxOutputTokens\": 2000\n }\n }\n },\n \"web-search\": {\n \"extends\": \"gemini-2.5-flash-base\",\n \"modelConfig\": {\n \"generateContentConfig\": {\n \"tools\": [\n {\n \"googleSearch\": {}\n }\n ]\n }\n }\n },\n \"web-fetch\": {\n \"extends\": \"gemini-2.5-flash-base\",\n \"modelConfig\": {\n \"generateContentConfig\": {\n \"tools\": [\n {\n \"urlContext\": {}\n }\n ]\n }\n }\n },\n \"web-fetch-fallback\": {\n \"extends\": \"gemini-2.5-flash-base\",\n \"modelConfig\": {}\n },\n 
\"loop-detection\": {\n \"extends\": \"gemini-2.5-flash-base\",\n \"modelConfig\": {}\n },\n \"loop-detection-double-check\": {\n \"extends\": \"base\",\n \"modelConfig\": {\n \"model\": \"gemini-2.5-pro\"\n }\n },\n \"llm-edit-fixer\": {\n \"extends\": \"gemini-2.5-flash-base\",\n \"modelConfig\": {}\n },\n \"next-speaker-checker\": {\n \"extends\": \"gemini-2.5-flash-base\",\n \"modelConfig\": {}\n },\n \"chat-compression-3-pro\": {\n \"modelConfig\": {\n \"model\": \"gemini-3-pro-preview\"\n }\n },\n \"chat-compression-2.5-pro\": {\n \"modelConfig\": {\n \"model\": \"gemini-2.5-pro\"\n }\n },\n \"chat-compression-2.5-flash\": {\n \"modelConfig\": {\n \"model\": \"gemini-2.5-flash\"\n }\n },\n \"chat-compression-2.5-flash-lite\": {\n \"modelConfig\": {\n \"model\": \"gemini-2.5-flash-lite\"\n }\n },\n \"chat-compression-default\": {\n \"modelConfig\": {\n \"model\": \"gemini-2.5-pro\"\n }\n }\n }\n}`", + "markdownDescription": "Model configurations.\n\n- Category: `Model`\n- Requires restart: `no`\n- Default: `{\n \"aliases\": {\n \"base\": {\n \"modelConfig\": {\n \"generateContentConfig\": {\n \"temperature\": 0,\n \"topP\": 1\n }\n }\n },\n \"chat-base\": {\n \"extends\": \"base\",\n \"modelConfig\": {\n \"generateContentConfig\": {\n \"thinkingConfig\": {\n \"includeThoughts\": true\n },\n \"temperature\": 1,\n \"topP\": 0.95,\n \"topK\": 64\n }\n }\n },\n \"chat-base-2.5\": {\n \"extends\": \"chat-base\",\n \"modelConfig\": {\n \"generateContentConfig\": {\n \"thinkingConfig\": {\n \"thinkingBudget\": 8192\n }\n }\n }\n },\n \"chat-base-3\": {\n \"extends\": \"chat-base\",\n \"modelConfig\": {\n \"generateContentConfig\": {\n \"thinkingConfig\": {\n \"thinkingLevel\": \"HIGH\"\n }\n }\n }\n },\n \"gemini-3-pro-preview\": {\n \"extends\": \"chat-base-3\",\n \"modelConfig\": {\n \"model\": \"gemini-3-pro-preview\"\n }\n },\n \"gemini-2.5-pro\": {\n \"extends\": \"chat-base-2.5\",\n \"modelConfig\": {\n \"model\": \"gemini-2.5-pro\"\n }\n },\n \"gemini-2.5-flash\": {\n \"extends\": \"chat-base-2.5\",\n \"modelConfig\": {\n \"model\": \"gemini-2.5-flash\"\n }\n },\n \"gemini-2.5-flash-lite\": {\n \"extends\": \"chat-base-2.5\",\n \"modelConfig\": {\n \"model\": \"gemini-2.5-flash-lite\"\n }\n },\n \"gemini-2.5-flash-base\": {\n \"extends\": \"base\",\n \"modelConfig\": {\n \"model\": \"gemini-2.5-flash\"\n }\n },\n \"classifier\": {\n \"extends\": \"base\",\n \"modelConfig\": {\n \"model\": \"gemini-2.5-flash-lite\",\n \"generateContentConfig\": {\n \"maxOutputTokens\": 1024,\n \"thinkingConfig\": {\n \"thinkingBudget\": 512\n }\n }\n }\n },\n \"prompt-completion\": {\n \"extends\": \"base\",\n \"modelConfig\": {\n \"model\": \"gemini-2.5-flash-lite\",\n \"generateContentConfig\": {\n \"temperature\": 0.3,\n \"maxOutputTokens\": 16000,\n \"thinkingConfig\": {\n \"thinkingBudget\": 0\n }\n }\n }\n },\n \"edit-corrector\": {\n \"extends\": \"base\",\n \"modelConfig\": {\n \"model\": \"gemini-2.5-flash-lite\",\n \"generateContentConfig\": {\n \"thinkingConfig\": {\n \"thinkingBudget\": 0\n }\n }\n }\n },\n \"summarizer-default\": {\n \"extends\": \"base\",\n \"modelConfig\": {\n \"model\": \"gemini-2.5-flash-lite\",\n \"generateContentConfig\": {\n \"maxOutputTokens\": 2000\n }\n }\n },\n \"summarizer-shell\": {\n \"extends\": \"base\",\n \"modelConfig\": {\n \"model\": \"gemini-2.5-flash-lite\",\n \"generateContentConfig\": {\n \"maxOutputTokens\": 2000\n }\n }\n },\n \"web-search\": {\n \"extends\": \"gemini-2.5-flash-base\",\n \"modelConfig\": {\n \"generateContentConfig\": {\n 
\"tools\": [\n {\n \"googleSearch\": {}\n }\n ]\n }\n }\n },\n \"web-fetch\": {\n \"extends\": \"gemini-2.5-flash-base\",\n \"modelConfig\": {\n \"generateContentConfig\": {\n \"tools\": [\n {\n \"urlContext\": {}\n }\n ]\n }\n }\n },\n \"web-fetch-fallback\": {\n \"extends\": \"gemini-2.5-flash-base\",\n \"modelConfig\": {}\n },\n \"loop-detection\": {\n \"extends\": \"gemini-2.5-flash-base\",\n \"modelConfig\": {}\n },\n \"loop-detection-double-check\": {\n \"extends\": \"base\",\n \"modelConfig\": {\n \"model\": \"gemini-2.5-pro\"\n }\n },\n \"llm-edit-fixer\": {\n \"extends\": \"gemini-2.5-flash-base\",\n \"modelConfig\": {}\n },\n \"next-speaker-checker\": {\n \"extends\": \"gemini-2.5-flash-base\",\n \"modelConfig\": {}\n },\n \"chat-compression-3-pro\": {\n \"modelConfig\": {\n \"model\": \"gemini-3-pro-preview\"\n }\n },\n \"chat-compression-2.5-pro\": {\n \"modelConfig\": {\n \"model\": \"gemini-2.5-pro\"\n }\n },\n \"chat-compression-2.5-flash\": {\n \"modelConfig\": {\n \"model\": \"gemini-2.5-flash\"\n }\n },\n \"chat-compression-2.5-flash-lite\": {\n \"modelConfig\": {\n \"model\": \"gemini-2.5-flash-lite\"\n }\n },\n \"chat-compression-default\": {\n \"modelConfig\": {\n \"model\": \"gemini-2.5-pro\"\n }\n }\n },\n \"overrides\": [\n {\n \"match\": {\n \"model\": \"chat-base\",\n \"isRetry\": true\n },\n \"modelConfig\": {\n \"generateContentConfig\": {\n \"temperature\": 1\n }\n }\n }\n ]\n}`", "default": { "aliases": { "base": { @@ -641,7 +641,20 @@ "model": "gemini-2.5-pro" } } - } + }, + "overrides": [ + { + "match": { + "model": "chat-base", + "isRetry": true + }, + "modelConfig": { + "generateContentConfig": { + "temperature": 1 + } + } + } + ] }, "type": "object", "properties": { @@ -858,6 +871,14 @@ "type": "object", "additionalProperties": true }, + "customOverrides": { + "title": "Custom Model Config Overrides", + "description": "Custom model config overrides. These are merged with (and added to) the built-in overrides.", + "markdownDescription": "Custom model config overrides. These are merged with (and added to) the built-in overrides.\n\n- Category: `Model`\n- Requires restart: `no`\n- Default: `[]`", + "default": [], + "type": "array", + "items": {} + }, "overrides": { "title": "Model Config Overrides", "description": "Apply specific configuration overrides based on matches, with a primary key of model (or alias). The most specific match will be used.",