Compare commits

...

30 Commits

Author SHA1 Message Date
pap-openai
77329e0f34 Merge branch 'main' into pap/model-selection 2025-08-02 22:18:11 +01:00
easong-openai
da2294548a Merge branch 'main' into pap/model-selection 2025-08-01 20:51:40 -07:00
pap
a5eea9048a Merge branch 'main' into pap/model-selection 2025-08-01 18:18:18 +01:00
pap
a2fe6336b6 fixing /model dropdown if no args provided 2025-08-01 18:09:21 +01:00
pap-openai
5d4ade38a4 Merge branch 'main' into pap/model-selection 2025-08-01 01:48:42 +01:00
pap-openai
4f2f4dcf6f Merge branch 'main' into pap/model-selection 2025-08-01 00:05:35 +01:00
pap
8dea0e4cd2 fuzzy is now a common lib + toml alphabetical order 2025-08-01 00:04:24 +01:00
pap
145688f019 linter 2025-07-31 23:09:55 +01:00
pap
1afa537148 remove useless code 2025-07-31 22:48:51 +01:00
pap
507f79deac linter 2025-07-31 22:34:12 +01:00
pap
d207169ea6 Merge branch 'main' into pap/model-selection 2025-07-31 22:29:45 +01:00
pap
4e2cf0bb7a can set non default models 2025-07-31 22:28:54 +01:00
pap
56e95f7ec7 scrollable model list 2025-07-31 21:54:42 +01:00
pap
fbc1ee7d62 new model popup 2025-07-31 15:00:08 +01:00
pap
f8e5b02320 desired_height for model selection 2025-07-31 13:35:59 +01:00
pap
02d16813bf Merge branch 'main' into pap/model-selection 2025-07-31 13:35:42 +01:00
pap-openai
7cf524d8b9 Merge branch 'main' into pap/model-selection 2025-07-30 22:49:44 +01:00
pap
40cf8a819c lint test 2025-07-30 22:28:02 +01:00
pap
55659e351c fixing merge 2025-07-30 22:00:13 +01:00
pap
2326f99e03 Merge branch 'main' into pap/model-selection 2025-07-30 21:49:14 +01:00
pap
91aa683ae9 cleaner code 2025-07-30 20:43:31 +01:00
pap
9dce0d7882 don't show session information at each reconfiguration 2025-07-30 20:32:01 +01:00
pap
661a4ff3f9 fix: self.emit_last_history_entry() 2025-07-30 20:07:14 +01:00
pap-openai
da3f90fdad Merge branch 'main' into pap/model-selection 2025-07-30 20:02:25 +01:00
pap-openai
fcbe6495f1 Merge branch 'main' into pap/model-selection 2025-07-30 18:06:38 +01:00
pap
34edf573d7 linter 2025-07-30 18:06:11 +01:00
pap
f78f8d8c7c fmt 2025-07-29 23:20:24 +01:00
pap
1836614c06 remove current model if search doesn't match 2025-07-28 23:25:35 +01:00
pap
9db5c7af9e remove preference ranking as we don't get models dynamically 2025-07-28 23:21:49 +01:00
pap
b294004ea9 adding /model 2025-07-28 23:06:30 +01:00
17 changed files with 810 additions and 126 deletions

20
codex-rs/Cargo.lock generated
View File

@@ -764,8 +764,8 @@ version = "0.0.0"
dependencies = [
"anyhow",
"clap",
"codex-common",
"ignore",
"nucleo-matcher",
"serde",
"serde_json",
"tokio",
@@ -869,6 +869,7 @@ dependencies = [
"strum 0.27.2",
"strum_macros 0.27.2",
"tokio",
"toml 0.8.23",
"tracing",
"tracing-appender",
"tracing-subscriber",
@@ -2803,16 +2804,6 @@ dependencies = [
"winapi",
]
[[package]]
name = "nucleo-matcher"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf33f538733d1a5a3494b836ba913207f14d9d4a1d3cd67030c5061bdd2cac85"
dependencies = [
"memchr",
"unicode-segmentation",
]
[[package]]
name = "num-bigint"
version = "0.4.6"
@@ -4779,6 +4770,7 @@ dependencies = [
"serde",
"serde_spanned 0.6.9",
"toml_datetime 0.6.11",
"toml_write",
"winnow",
]
@@ -4791,6 +4783,12 @@ dependencies = [
"winnow",
]
[[package]]
name = "toml_write"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801"
[[package]]
name = "toml_writer"
version = "1.0.2"

View File

@@ -0,0 +1,49 @@
/// Simple case-insensitive subsequence matcher used for fuzzy filtering.
///
/// Returns the positions of the matched characters in `haystack` and a score
/// where smaller is better.
///
/// Indices are *character* positions (0-based, counted over the chars of a
/// lowercased copy of `haystack`), not byte offsets, so callers can use them
/// directly for per-character highlighting. For ASCII inputs this matches the
/// previous byte-offset behavior exactly.
///
/// NOTE(review): positions are computed on the lowercased copy; for inputs
/// where `to_lowercase()` changes the number of chars (rare, e.g. 'İ') they
/// may not map 1:1 onto the original `haystack` — confirm callers only
/// highlight ASCII-ish strings.
pub fn fuzzy_match(haystack: &str, needle: &str) -> Option<(Vec<usize>, i32)> {
    // Empty query matches everything but with the worst possible score so it
    // never outranks a real match (smaller is better).
    if needle.is_empty() {
        return Some((Vec::new(), i32::MAX));
    }
    let h_lower = haystack.to_lowercase();
    let n_lower = needle.to_lowercase();
    let mut indices: Vec<usize> = Vec::with_capacity(n_lower.chars().count());
    let mut h_chars = h_lower.chars().enumerate();
    let mut last_pos: Option<usize> = None;
    for nc in n_lower.chars() {
        // Greedily take the next occurrence of `nc`; the shared iterator
        // guarantees matched positions are strictly increasing.
        let pos = h_chars.by_ref().find_map(|(i, hc)| (hc == nc).then_some(i))?;
        indices.push(pos);
        last_pos = Some(pos);
    }
    // Score: match-window length minus needle length (tighter is better),
    // with a strong bonus for a prefix match.
    let first = *indices.first().unwrap_or(&0);
    let last = last_pos.unwrap_or(first);
    let window = (last as i32 - first as i32 + 1) - (indices.len() as i32);
    let mut score = window.max(0);
    if first == 0 {
        score -= 100; // strong bonus for prefix match
    }
    Some((indices, score))
}
/// Convenience wrapper returning just the matched positions of a fuzzy
/// match, discarding the score. `None` when `needle` does not match.
pub fn fuzzy_indices(haystack: &str, needle: &str) -> Option<Vec<usize>> {
    match fuzzy_match(haystack, needle) {
        Some((indices, _score)) => Some(indices),
        None => None,
    }
}

View File

@@ -23,3 +23,5 @@ mod sandbox_summary;
#[cfg(feature = "sandbox_summary")]
pub use sandbox_summary::summarize_sandbox_policy;
pub mod fuzzy_match;

View File

@@ -125,20 +125,8 @@ impl Codex {
let user_instructions = get_user_instructions(&config).await;
let configure_session = Op::ConfigureSession {
provider: config.model_provider.clone(),
model: config.model.clone(),
model_reasoning_effort: config.model_reasoning_effort,
model_reasoning_summary: config.model_reasoning_summary,
user_instructions,
base_instructions: config.base_instructions.clone(),
approval_policy: config.approval_policy,
sandbox_policy: config.sandbox_policy.clone(),
disable_response_storage: config.disable_response_storage,
notify: config.notify.clone(),
cwd: config.cwd.clone(),
resume_path: resume_path.clone(),
};
let configure_session =
config.to_configure_session_op(Some(config.model.clone()), user_instructions);
let config = Arc::new(config);
@@ -721,8 +709,14 @@ async fn submission_loop(
}
};
let client_config = {
let mut c = (*config).clone();
c.model = model.clone();
Arc::new(c)
};
let client = ModelClient::new(
config.clone(),
client_config,
auth.clone(),
provider.clone(),
model_reasoning_effort,

View File

@@ -14,6 +14,7 @@ use crate::model_provider_info::ModelProviderInfo;
use crate::model_provider_info::built_in_model_providers;
use crate::openai_model_info::get_model_info;
use crate::protocol::AskForApproval;
use crate::protocol::Op;
use crate::protocol::SandboxPolicy;
use dirs::home_dir;
use serde::Deserialize;
@@ -185,6 +186,32 @@ impl Config {
// Step 4: merge with the strongly-typed overrides.
Self::load_from_base_config_with_overrides(cfg, overrides, codex_home)
}
/// Construct an `Op::ConfigureSession` from this `Config`.
///
/// Centralizes the field-by-field copy so every caller builds the session
/// op the same way.
///
/// - `override_model`: when `Some`, use this model instead of `self.model`.
/// - `user_instructions`: pass-through instructions to embed in the session.
pub fn to_configure_session_op(
&self,
override_model: Option<String>,
user_instructions: Option<String>,
) -> Op {
// Fall back to the configured model only when no override is supplied.
let model = override_model.unwrap_or_else(|| self.model.clone());
Op::ConfigureSession {
provider: self.model_provider.clone(),
model,
model_reasoning_effort: self.model_reasoning_effort,
model_reasoning_summary: self.model_reasoning_summary,
user_instructions,
base_instructions: self.base_instructions.clone(),
approval_policy: self.approval_policy,
sandbox_policy: self.sandbox_policy.clone(),
disable_response_storage: self.disable_response_storage,
notify: self.notify.clone(),
cwd: self.cwd.clone(),
// NOTE(review): resume_path is now always sourced from
// `self.experimental_resume`; the pre-refactor call site passed a
// separate `resume_path` value — confirm these are equivalent for
// every caller of this helper.
resume_path: self.experimental_resume.clone(),
}
}
}
/// Read `CODEX_HOME/config.toml` and return it as a generic TOML value. Returns

View File

@@ -32,7 +32,7 @@ pub use model_provider_info::ModelProviderInfo;
pub use model_provider_info::WireApi;
pub use model_provider_info::built_in_model_providers;
mod models;
mod openai_model_info;
pub mod openai_model_info;
mod openai_tools;
pub mod plan_tool;
mod project_doc;

View File

@@ -69,3 +69,8 @@ pub(crate) fn get_model_info(name: &str) -> Option<ModelInfo> {
_ => None,
}
}
/// Return a curated list of commonly-used OpenAI model names for selection UIs.
pub fn get_all_model_names() -> Vec<&'static str> {
    // Keep this list in sync with the models surfaced by the `/model` popup.
    const MODELS: [&str; 5] = ["codex-mini-latest", "o3", "o4-mini", "gpt-4.1", "gpt-4o"];
    MODELS.to_vec()
}

View File

@@ -14,8 +14,8 @@ path = "src/lib.rs"
[dependencies]
anyhow = "1"
clap = { version = "4", features = ["derive"] }
codex-common = { path = "../common" }
ignore = "0.4.23"
nucleo-matcher = "0.3.1"
serde = { version = "1", features = ["derive"] }
serde_json = "1.0.110"
tokio = { version = "1", features = ["full"] }

View File

@@ -1,14 +1,9 @@
use codex_common::fuzzy_match::fuzzy_indices as common_fuzzy_indices;
use codex_common::fuzzy_match::fuzzy_match as common_fuzzy_match;
use ignore::WalkBuilder;
use ignore::overrides::OverrideBuilder;
use nucleo_matcher::Matcher;
use nucleo_matcher::Utf32Str;
use nucleo_matcher::pattern::AtomKind;
use nucleo_matcher::pattern::CaseMatching;
use nucleo_matcher::pattern::Normalization;
use nucleo_matcher::pattern::Pattern;
use serde::Serialize;
use std::cell::UnsafeCell;
use std::cmp::Reverse;
use std::collections::BinaryHeap;
use std::num::NonZero;
use std::path::Path;
@@ -24,17 +19,13 @@ pub use cli::Cli;
/// A single match result returned from the search.
///
/// * `score` Relevance score returned by `nucleo_matcher`.
/// * `score` Relevance score from the fuzzy matcher (smaller is better).
/// * `path` Path to the matched file (relative to the search directory).
/// * `indices` Optional list of character indices that matched the query.
/// These are only filled when the caller of [`run`] sets
/// `compute_indices` to `true`. The indices vector follows the
/// guidance from `nucleo_matcher::Pattern::indices`: they are
/// unique and sorted in ascending order so that callers can use
/// them directly for highlighting.
/// * `indices` Optional list of character positions that matched the query.
/// These are unique and sorted so callers can use them directly for highlighting.
#[derive(Debug, Clone, Serialize)]
pub struct FileMatch {
pub score: u32,
pub score: i32,
pub path: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub indices: Option<Vec<u32>>, // Sorted & deduplicated when present
@@ -130,7 +121,6 @@ pub fn run(
cancel_flag: Arc<AtomicBool>,
compute_indices: bool,
) -> anyhow::Result<FileSearchResults> {
let pattern = create_pattern(pattern_text);
// Create one BestMatchesList per worker thread so that each worker can
// operate independently. The results across threads will be merged when
// the traversal is complete.
@@ -139,13 +129,7 @@ pub fn run(
num_best_matches_lists,
} = create_worker_count(threads);
let best_matchers_per_worker: Vec<UnsafeCell<BestMatchesList>> = (0..num_best_matches_lists)
.map(|_| {
UnsafeCell::new(BestMatchesList::new(
limit.get(),
pattern.clone(),
Matcher::new(nucleo_matcher::Config::DEFAULT),
))
})
.map(|_| UnsafeCell::new(BestMatchesList::new(limit.get(), pattern_text.to_string())))
.collect();
// Use the same tree-walker library that ripgrep uses. We use it directly so
@@ -220,47 +204,33 @@ pub fn run(
}
// Merge results across best_matchers_per_worker.
let mut global_heap: BinaryHeap<Reverse<(u32, String)>> = BinaryHeap::new();
let mut global_heap: BinaryHeap<(i32, String)> = BinaryHeap::new();
let mut total_match_count = 0;
for best_list_cell in best_matchers_per_worker.iter() {
let best_list = unsafe { &*best_list_cell.get() };
total_match_count += best_list.num_matches;
for &Reverse((score, ref line)) in best_list.binary_heap.iter() {
for &(score, ref line) in best_list.binary_heap.iter() {
if global_heap.len() < limit.get() {
global_heap.push(Reverse((score, line.clone())));
} else if let Some(min_element) = global_heap.peek() {
if score > min_element.0.0 {
global_heap.push((score, line.clone()));
} else if let Some(&(worst_score, _)) = global_heap.peek() {
if score < worst_score {
global_heap.pop();
global_heap.push(Reverse((score, line.clone())));
global_heap.push((score, line.clone()));
}
}
}
}
let mut raw_matches: Vec<(u32, String)> = global_heap.into_iter().map(|r| r.0).collect();
let mut raw_matches: Vec<(i32, String)> = global_heap.into_iter().collect();
sort_matches(&mut raw_matches);
// Transform into `FileMatch`, optionally computing indices.
let mut matcher = if compute_indices {
Some(Matcher::new(nucleo_matcher::Config::DEFAULT))
} else {
None
};
let matches: Vec<FileMatch> = raw_matches
.into_iter()
.map(|(score, path)| {
let indices = if compute_indices {
let mut buf = Vec::<char>::new();
let haystack: Utf32Str<'_> = Utf32Str::new(&path, &mut buf);
let mut idx_vec: Vec<u32> = Vec::new();
if let Some(ref mut m) = matcher {
// Ignore the score returned from indices we already have `score`.
pattern.indices(haystack, m, &mut idx_vec);
}
idx_vec.sort_unstable();
idx_vec.dedup();
Some(idx_vec)
common_fuzzy_indices(&path, pattern_text)
.map(|v| v.into_iter().map(|i| i as u32).collect())
} else {
None
};
@@ -279,9 +249,9 @@ pub fn run(
})
}
/// Sort matches in-place by descending score, then ascending path.
fn sort_matches(matches: &mut [(u32, String)]) {
matches.sort_by(|a, b| match b.0.cmp(&a.0) {
/// Sort matches in-place by ascending score, then ascending path.
fn sort_matches(matches: &mut [(i32, String)]) {
matches.sort_by(|a, b| match a.0.cmp(&b.0) {
std::cmp::Ordering::Equal => a.1.cmp(&b.1),
other => other,
});
@@ -291,39 +261,31 @@ fn sort_matches(matches: &mut [(u32, String)]) {
struct BestMatchesList {
max_count: usize,
num_matches: usize,
pattern: Pattern,
matcher: Matcher,
binary_heap: BinaryHeap<Reverse<(u32, String)>>,
/// Internal buffer for converting strings to UTF-32.
utf32buf: Vec<char>,
pattern: String,
binary_heap: BinaryHeap<(i32, String)>,
}
impl BestMatchesList {
fn new(max_count: usize, pattern: Pattern, matcher: Matcher) -> Self {
fn new(max_count: usize, pattern: String) -> Self {
Self {
max_count,
num_matches: 0,
pattern,
matcher,
binary_heap: BinaryHeap::new(),
utf32buf: Vec::<char>::new(),
}
}
fn insert(&mut self, line: &str) {
let haystack: Utf32Str<'_> = Utf32Str::new(line, &mut self.utf32buf);
if let Some(score) = self.pattern.score(haystack, &mut self.matcher) {
// In the tests below, we verify that score() returns None for a
// non-match, so we can categorically increment the count here.
if let Some((_indices, score)) = common_fuzzy_match(line, &self.pattern) {
// Count all matches; non-matches return None above.
self.num_matches += 1;
if self.binary_heap.len() < self.max_count {
self.binary_heap.push(Reverse((score, line.to_string())));
} else if let Some(min_element) = self.binary_heap.peek() {
if score > min_element.0.0 {
self.binary_heap.push((score, line.to_string()));
} else if let Some(&(worst_score, _)) = self.binary_heap.peek() {
if score < worst_score {
self.binary_heap.pop();
self.binary_heap.push(Reverse((score, line.to_string())));
self.binary_heap.push((score, line.to_string()));
}
}
}
@@ -354,28 +316,16 @@ fn create_worker_count(num_workers: NonZero<usize>) -> WorkerCount {
}
}
fn create_pattern(pattern: &str) -> Pattern {
Pattern::new(
pattern,
CaseMatching::Smart,
Normalization::Smart,
AtomKind::Fuzzy,
)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn verify_score_is_none_for_non_match() {
let mut utf32buf = Vec::<char>::new();
let line = "hello";
let mut matcher = Matcher::new(nucleo_matcher::Config::DEFAULT);
let haystack: Utf32Str<'_> = Utf32Str::new(line, &mut utf32buf);
let pattern = create_pattern("zzz");
let score = pattern.score(haystack, &mut matcher);
assert_eq!(score, None);
fn verify_no_match_does_not_increment_or_push() {
let mut list = BestMatchesList::new(5, "zzz".to_string());
list.insert("hello");
assert_eq!(list.num_matches, 0);
assert_eq!(list.binary_heap.len(), 0);
}
#[test]
@@ -388,11 +338,11 @@ mod tests {
sort_matches(&mut matches);
// Highest score first; ties broken alphabetically.
// Lowest score first; ties broken alphabetically.
let expected = vec![
(90, "zzz".to_string()),
(100, "a_path".to_string()),
(100, "b_path".to_string()),
(90, "zzz".to_string()),
];
assert_eq!(matches, expected);

View File

@@ -55,6 +55,7 @@ tokio = { version = "1", features = [
"rt-multi-thread",
"signal",
] }
toml = "0.8"
tracing = { version = "0.1.41", features = ["log"] }
tracing-appender = "0.2.3"
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }

View File

@@ -45,6 +45,28 @@ enum AppState<'a> {
GitWarning { screen: GitWarningScreen },
}
/// Strip a single pair of surrounding quotes from the provided string if present.
///
/// The input is trimmed first; supports straight quotes (`"…"`, `'…'`) and the
/// common curly pairs U+2018/U+2019 and U+201C/U+201D. Only one layer of
/// quotes is removed, and mismatched pairs are left untouched.
pub fn strip_surrounding_quotes(s: &str) -> &str {
    // Opening/closing pairs (curly quotes differ on each side, and their
    // UTF-8 byte widths differ from the ASCII ones).
    const QUOTE_PAIRS: &[(char, char)] = &[
        ('"', '"'),
        ('\'', '\''),
        ('\u{2018}', '\u{2019}'), // ‘ … ’
        ('\u{201C}', '\u{201D}'), // “ … ”
    ];
    let t = s.trim();
    // A bare quote char (or empty string) has nothing to strip. Counting
    // chars (not bytes) keeps a lone multi-byte curly quote out of the loop.
    if t.chars().count() < 2 {
        return t;
    }
    for &(open, close) in QUOTE_PAIRS {
        if t.starts_with(open) && t.ends_with(close) {
            // Safe slicing: we just verified the first/last chars, so these
            // byte offsets land on char boundaries.
            let start = open.len_utf8();
            let end = t.len() - close.len_utf8();
            return &t[start..end];
        }
    }
    t
}
pub(crate) struct App<'a> {
app_event_tx: AppEventSender,
app_event_rx: Receiver<AppEvent>,
@@ -279,6 +301,16 @@ impl App<'_> {
AppEvent::ExitRequest => {
break;
}
AppEvent::SelectModel(model) => {
if let AppState::Chat { widget } = &mut self.app_state {
widget.update_model_and_reconfigure(model);
}
}
AppEvent::OpenModelSelector => {
if let AppState::Chat { widget } = &mut self.app_state {
widget.show_model_selector();
}
}
AppEvent::CodexOp(op) => match &mut self.app_state {
AppState::Chat { widget } => widget.submit_op(op),
AppState::GitWarning { .. } => {}
@@ -368,6 +400,37 @@ impl App<'_> {
),
}));
}
SlashCommand::Model => {
// Open the model selector when `/model` has no arguments.
if let AppState::Chat { widget } = &mut self.app_state {
widget.show_model_selector();
}
}
},
AppEvent::DispatchCommandWithArgs(command, args) => match command {
SlashCommand::Model => {
let arg = args.trim();
if let AppState::Chat { widget } = &mut self.app_state {
// Normalize commonly quoted inputs like \"o3\" or 'o3' or “o3”.
let normalized = strip_surrounding_quotes(arg).trim().to_string();
if !normalized.is_empty() {
widget.update_model_and_reconfigure(normalized);
}
}
}
#[cfg(debug_assertions)]
SlashCommand::TestApproval => {
// Ignore args; forward to the existing no-args handler
self.app_event_tx.send(AppEvent::DispatchCommand(command));
}
SlashCommand::New
| SlashCommand::Quit
| SlashCommand::Diff
| SlashCommand::Compact => {
// For other commands, fall back to existing handling.
// We can ignore args for now.
self.app_event_tx.send(AppEvent::DispatchCommand(command));
}
},
AppEvent::StartFileSearch(query) => {
self.file_search.on_user_query(query);
@@ -498,3 +561,44 @@ impl App<'_> {
}
}
}
#[cfg(test)]
mod tests {
use super::strip_surrounding_quotes;
// Quote stripping should be a no-op for unquoted input and remove exactly
// one surrounding pair otherwise.
#[test]
fn strip_surrounding_quotes_cases() {
let cases = vec![
("o3", "o3"),
("\"codex-mini-latest\"", "codex-mini-latest"),
("another_model", "another_model"),
];
for (input, expected) in cases {
assert_eq!(strip_surrounding_quotes(input), expected.to_string());
}
}
// End-to-end check of the `/model <args>` pipeline: raw-arg extraction
// (mirroring chat_composer's parsing) followed by quote normalization
// (mirroring the app's dispatch logic).
#[test]
fn model_command_args_extraction_and_normalization() {
let cases = vec![
("/model", "", ""),
("/model o3", "o3", "o3"),
("/model another_model", "another_model", "another_model"),
];
for (line, raw_expected, norm_expected) in cases {
// Extract raw args as in chat_composer: strip the leading '/', skip the
// command token, and left-trim whatever follows it.
let raw = if let Some(stripped) = line.strip_prefix('/') {
let token = stripped.trim_start();
let cmd_token = token.split_whitespace().next().unwrap_or("");
let rest = &token[cmd_token.len()..];
rest.trim_start().to_string()
} else {
String::new()
};
assert_eq!(raw, raw_expected, "raw args for '{line}'");
// Normalize as in app dispatch logic
let normalized = strip_surrounding_quotes(&raw).trim().to_string();
assert_eq!(normalized, norm_expected, "normalized args for '{line}'");
}
}
}

View File

@@ -34,6 +34,10 @@ pub(crate) enum AppEvent {
/// layer so it can be handled centrally.
DispatchCommand(SlashCommand),
/// Dispatch a recognized slash command along with the raw argument string
/// following the command on the first line.
DispatchCommandWithArgs(SlashCommand, String),
/// Kick off an asynchronous file search for the given query (text after
/// the `@`). Previous searches may be cancelled by the app layer so there
/// is at most one in-flight search.
@@ -48,4 +52,10 @@ pub(crate) enum AppEvent {
},
InsertHistory(Vec<Line<'static>>),
/// User selected a model from the model-selection dropdown.
SelectModel(String),
/// Request the app to open the model selector (populate options and show popup).
OpenModelSelector,
}

View File

@@ -19,9 +19,11 @@ use tui_textarea::TextArea;
use super::chat_composer_history::ChatComposerHistory;
use super::command_popup::CommandPopup;
use super::file_search_popup::FileSearchPopup;
use super::model_selection_popup::ModelSelectionPopup;
use crate::app_event::AppEvent;
use crate::app_event_sender::AppEventSender;
use crate::slash_command::SlashCommand;
use codex_file_search::FileMatch;
const BASE_PLACEHOLDER_TEXT: &str = "...";
@@ -52,6 +54,7 @@ enum ActivePopup {
None,
Command(CommandPopup),
File(FileSearchPopup),
Model(ModelSelectionPopup),
}
impl ChatComposer<'_> {
@@ -87,6 +90,7 @@ impl ChatComposer<'_> {
ActivePopup::None => 1u16,
ActivePopup::Command(c) => c.calculate_required_height(),
ActivePopup::File(c) => c.calculate_required_height(),
ActivePopup::Model(c) => c.calculate_required_height(),
}
}
@@ -182,20 +186,47 @@ impl ChatComposer<'_> {
self.update_border(has_focus);
}
/// Open or update the model-selection popup with the provided options.
pub(crate) fn open_model_selector(&mut self, current_model: &str, options: Vec<String>) {
match &mut self.active_popup {
ActivePopup::Model(popup) => {
popup.set_options(current_model, options);
}
_ => {
self.active_popup =
ActivePopup::Model(ModelSelectionPopup::new(current_model, options));
}
}
// Initialize/update the query from the composer.
self.sync_model_popup();
}
/// Handle a key event coming from the main UI.
pub fn handle_key_event(&mut self, key_event: KeyEvent) -> (InputResult, bool) {
let result = match &mut self.active_popup {
ActivePopup::Command(_) => self.handle_key_event_with_slash_popup(key_event),
ActivePopup::File(_) => self.handle_key_event_with_file_popup(key_event),
ActivePopup::Model(_) => self.handle_key_event_with_model_popup(key_event),
ActivePopup::None => self.handle_key_event_without_popup(key_event),
};
// Update (or hide/show) popup after processing the key.
self.sync_command_popup();
if matches!(self.active_popup, ActivePopup::Command(_)) {
self.dismissed_file_popup_token = None;
} else {
self.sync_file_search_popup();
match &self.active_popup {
ActivePopup::Model(_) => {
// Only keep model popup in sync when active; do not interfere with other popups.
self.sync_model_popup();
}
ActivePopup::Command(_) => {
self.sync_command_popup();
// When slash popup active, suppress file popup.
self.dismissed_file_popup_token = None;
}
_ => {
self.sync_command_popup();
if !matches!(self.active_popup, ActivePopup::Command(_)) {
self.sync_file_search_popup();
}
}
}
result
@@ -244,10 +275,39 @@ impl ChatComposer<'_> {
ctrl: false,
} => {
if let Some(cmd) = popup.selected_command() {
// Send command to the app layer.
self.app_event_tx.send(AppEvent::DispatchCommand(*cmd));
// Extract arguments after the command from the first line.
let first_line = self
.textarea
.lines()
.first()
.map(|s| s.as_str())
.unwrap_or("");
// Clear textarea so no residual text remains.
let args = if let Some((_, args)) =
Self::parse_slash_command_and_args_from_line(first_line)
{
args
} else {
String::new()
};
// Special-case: for `/model` with no arguments, keep the composer as "/model "
// so the model selector opens and the user can type to filter.
if *cmd == SlashCommand::Model && args.trim().is_empty() {
// Replace the entire input with "/model " (with a trailing space).
self.textarea.select_all();
self.textarea.cut();
let _ = self.textarea.insert_str(format!("/{} ", cmd.command()));
// Hide the slash-command popup; sync logic will open the model selector.
self.active_popup = ActivePopup::None;
return (InputResult::None, true);
}
// Send command + args to the app layer.
self.app_event_tx
.send(AppEvent::DispatchCommandWithArgs(*cmd, args));
// Clear textarea so no residual text remains
self.textarea.select_all();
self.textarea.cut();
@@ -305,6 +365,80 @@ impl ChatComposer<'_> {
}
}
/// Handle key events when model selection popup is visible.
fn handle_key_event_with_model_popup(&mut self, key_event: KeyEvent) -> (InputResult, bool) {
let ActivePopup::Model(popup) = &mut self.active_popup else {
unreachable!();
};
match key_event.into() {
Input { key: Key::Up, .. } => {
popup.move_up();
(InputResult::None, true)
}
Input { key: Key::Down, .. } => {
popup.move_down();
(InputResult::None, true)
}
Input { key: Key::Esc, .. } => {
// Hide model popup; keep composer content unchanged.
self.active_popup = ActivePopup::None;
(InputResult::None, true)
}
Input {
key: Key::Enter,
ctrl: false,
alt: false,
shift: false,
}
| Input { key: Key::Tab, .. } => {
if let Some(model) = popup.selected_model() {
self.app_event_tx.send(AppEvent::SelectModel(model));
// Clear composer input and close the popup.
self.textarea.select_all();
self.textarea.cut();
self.pending_pastes.clear();
self.active_popup = ActivePopup::None;
return (InputResult::None, true);
}
// No selection in the list: treat the typed argument as the model name.
// Extract arguments after `/model` from the first line.
let first_line = self
.textarea
.lines()
.first()
.map(|s| s.as_str())
.unwrap_or("");
let args = if let Some((cmd_token, args)) =
Self::parse_slash_command_and_args_from_line(first_line)
{
if cmd_token == SlashCommand::Model.command() {
args
} else {
String::new()
}
} else {
String::new()
};
if !args.trim().is_empty() {
// Dispatch as a command with args so normalization is applied centrally.
self.app_event_tx
.send(AppEvent::DispatchCommandWithArgs(SlashCommand::Model, args));
// Clear composer input and close the popup.
self.textarea.select_all();
self.textarea.cut();
self.pending_pastes.clear();
self.active_popup = ActivePopup::None;
return (InputResult::None, true);
}
(InputResult::None, false)
}
input => self.handle_input_basic(input),
}
}
/// Extract the `@token` that the cursor is currently positioned on, if any.
///
/// The returned string **does not** include the leading `@`.
@@ -590,19 +724,47 @@ impl ChatComposer<'_> {
.unwrap_or("");
let input_starts_with_slash = first_line.starts_with('/');
// Special handling: if the user typed `/model ` (with a space), open the model selector
// and do not show the slash-command popup.
let should_open_model_selector = if let Some(stripped) = first_line.strip_prefix('/') {
let token = stripped.trim_start();
let cmd_token = token.split_whitespace().next().unwrap_or("");
if cmd_token == SlashCommand::Model.command() {
let rest = &token[cmd_token.len()..];
// Show model popup as soon as a whitespace after the command is present.
rest.chars().next().is_some_and(|c| c.is_whitespace())
} else {
false
}
} else {
false
};
match &mut self.active_popup {
ActivePopup::Command(popup) => {
if input_starts_with_slash {
popup.on_composer_text_change(first_line.to_string());
if should_open_model_selector {
// Switch away from command popup and request opening the model selector.
self.active_popup = ActivePopup::None;
self.app_event_tx.send(AppEvent::OpenModelSelector);
} else {
popup.on_composer_text_change(first_line.to_string());
}
} else {
self.active_popup = ActivePopup::None;
}
}
_ => {
if input_starts_with_slash {
let mut command_popup = CommandPopup::new();
command_popup.on_composer_text_change(first_line.to_string());
self.active_popup = ActivePopup::Command(command_popup);
if should_open_model_selector {
// Request the app to open the model selector; popup will render once options arrive.
self.app_event_tx.send(AppEvent::OpenModelSelector);
} else {
let mut command_popup = CommandPopup::new();
command_popup.on_composer_text_change(first_line.to_string());
self.active_popup = ActivePopup::Command(command_popup);
}
}
}
}
@@ -644,6 +806,48 @@ impl ChatComposer<'_> {
self.dismissed_file_popup_token = None;
}
/// Synchronize the model-selection popup filter with the current composer text.
/// When the first line starts with `/model`, everything after the command becomes the query.
fn sync_model_popup(&mut self) {
let first_line = self
.textarea
.lines()
.first()
.map(|s| s.as_str())
.unwrap_or("");
// Expect `/model` as the first token on the first line.
if let Some((cmd_token, args)) = Self::parse_slash_command_and_args_from_line(first_line) {
if cmd_token == SlashCommand::Model.command() {
if let ActivePopup::Model(popup) = &mut self.active_popup {
popup.set_query(&args);
}
return;
}
}
// Not a `/model` line anymore; hide the model popup if visible.
if matches!(self.active_popup, ActivePopup::Model(_)) {
self.active_popup = ActivePopup::None;
}
}
/// Parse a leading "/command" and return `(command_token, args)` with the
/// args left-trimmed. Returns `None` when the line does not start with a
/// slash or the command token is empty.
fn parse_slash_command_and_args_from_line(line: &str) -> Option<(String, String)> {
    let token = line.strip_prefix('/')?.trim_start();
    let cmd_token = token.split_whitespace().next().unwrap_or("");
    if cmd_token.is_empty() {
        return None;
    }
    let rest = token[cmd_token.len()..].trim_start();
    Some((cmd_token.to_string(), rest.to_string()))
}
fn update_border(&mut self, has_focus: bool) {
let border_style = if has_focus {
Style::default().fg(Color::Cyan)
@@ -705,6 +909,26 @@ impl WidgetRef for &ChatComposer<'_> {
popup.render(popup_rect, buf);
self.textarea.render(textarea_rect, buf);
}
ActivePopup::Model(popup) => {
let popup_height = popup.calculate_required_height();
let popup_height = popup_height.min(area.height);
let textarea_rect = Rect {
x: area.x,
y: area.y,
width: area.width,
height: area.height.saturating_sub(popup_height),
};
let popup_rect = Rect {
x: area.x,
y: area.y + textarea_rect.height,
width: area.width,
height: popup_height,
};
popup.render(popup_rect, buf);
self.textarea.render(textarea_rect, buf);
}
ActivePopup::None => {
let mut textarea_rect = area;
textarea_rect.height = textarea_rect.height.saturating_sub(1);

View File

@@ -18,6 +18,7 @@ mod chat_composer;
mod chat_composer_history;
mod command_popup;
mod file_search_popup;
mod model_selection_popup;
mod status_indicator_view;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
@@ -70,6 +71,12 @@ impl BottomPane<'_> {
}
}
/// Show the model-selection popup in the composer.
pub(crate) fn show_model_selector(&mut self, current_model: &str, options: Vec<String>) {
self.composer.open_model_selector(current_model, options);
self.request_redraw();
}
pub fn desired_height(&self, width: u16) -> u16 {
self.active_view
.as_ref()

View File

@@ -0,0 +1,247 @@
use codex_common::fuzzy_match::fuzzy_indices;
use codex_common::fuzzy_match::fuzzy_match;
use ratatui::buffer::Buffer;
use ratatui::layout::Rect;
use ratatui::prelude::Constraint;
use ratatui::style::Color;
use ratatui::style::Modifier;
use ratatui::style::Style;
use ratatui::text::Line;
use ratatui::text::Span;
use ratatui::widgets::Block;
use ratatui::widgets::BorderType;
use ratatui::widgets::Borders;
use ratatui::widgets::Cell;
use ratatui::widgets::Row;
use ratatui::widgets::Table;
use ratatui::widgets::Widget;
use ratatui::widgets::WidgetRef;
/// Maximum number of options shown in the popup.
const MAX_RESULTS: usize = 8;
/// Visual state for the model-selection popup.
///
/// Invariant maintained by the mutators below: `selected_idx`, when `Some`,
/// indexes into the currently *visible* (filtered) rows, and is `None` when
/// the filtered list is empty.
pub(crate) struct ModelSelectionPopup {
/// The current model (pinned and color-coded when visible).
current_model: String,
/// All available model options (deduplicated externally as needed).
options: Vec<String>,
/// Current filter query (derived from the composer, e.g. after `/model`).
query: String,
/// Currently selected index among the visible rows (if any).
selected_idx: Option<usize>,
}
impl ModelSelectionPopup {
    /// Create a popup pinned to `current_model` offering `options`.
    pub(crate) fn new(current_model: &str, options: Vec<String>) -> Self {
        Self {
            current_model: current_model.to_string(),
            options,
            query: String::new(),
            selected_idx: None,
        }
    }

    /// Replace the current model and option list, clamping the selection so
    /// it stays inside the (possibly shorter) filtered row list.
    pub(crate) fn set_options(&mut self, current_model: &str, options: Vec<String>) {
        self.current_model = current_model.to_string();
        self.options = options;
        let row_count = self.visible_rows().len();
        self.selected_idx = if row_count == 0 {
            None
        } else {
            Some(self.selected_idx.unwrap_or(0).min(row_count - 1))
        };
    }

    /// Update the fuzzy filter query. A changed filter invalidates the old
    /// cursor: jump back to the top, or clear it when nothing matches.
    pub(crate) fn set_query(&mut self, query: &str) {
        if self.query == query {
            return;
        }
        self.query = query.to_string();
        self.selected_idx = if self.visible_rows().is_empty() {
            None
        } else {
            Some(0)
        };
    }

    /// Move selection cursor up (no-op at the top; selects the first row
    /// when nothing is selected yet and rows exist).
    pub(crate) fn move_up(&mut self) {
        match self.selected_idx {
            // Already at row 0: saturating_sub keeps us there.
            Some(idx) => self.selected_idx = Some(idx.saturating_sub(1)),
            None if !self.visible_rows().is_empty() => self.selected_idx = Some(0),
            None => {}
        }
    }

    /// Move selection cursor down (clamped to the last visible row).
    pub(crate) fn move_down(&mut self) {
        let row_count = self.visible_rows().len();
        self.selected_idx = match (self.selected_idx, row_count) {
            (_, 0) => None,
            (None, _) => Some(0),
            (Some(idx), _) => Some((idx + 1).min(row_count - 1)),
        };
    }

    /// Currently selected model name, if any.
    pub(crate) fn selected_model(&self) -> Option<String> {
        let rows = self.visible_rows();
        let idx = self.selected_idx?;
        let DisplayRow::Model { name, .. } = rows.get(idx)?;
        Some(name.clone())
    }

    /// Preferred height (rows) including border.
    pub(crate) fn calculate_required_height(&self) -> u16 {
        // At least one row (for the "no matches" placeholder), at most
        // MAX_RESULTS.
        self.visible_rows().len().clamp(1, MAX_RESULTS) as u16
    }

    /// Compute rows to display applying fuzzy filtering and pinning the
    /// current model on top.
    fn visible_rows(&self) -> Vec<DisplayRow> {
        // Every option except the current model (which gets special
        // treatment in both branches below).
        let others: Vec<&str> = self
            .options
            .iter()
            .map(String::as_str)
            .filter(|name| *name != self.current_model)
            .collect();

        // No filter: current model pinned first, the rest in their
        // original order.
        if self.query.trim().is_empty() {
            return std::iter::once(DisplayRow::Model {
                name: self.current_model.clone(),
                match_indices: None,
                is_current: true,
            })
            .chain(others.into_iter().map(|name| DisplayRow::Model {
                name: name.to_string(),
                match_indices: None,
                is_current: false,
            }))
            .collect();
        }

        // Searching: include the current model only if it matches.
        let mut rows: Vec<DisplayRow> = Vec::new();
        if let Some(indices) = fuzzy_indices(&self.current_model, &self.query) {
            rows.push(DisplayRow::Model {
                name: self.current_model.clone(),
                match_indices: Some(indices),
                is_current: true,
            });
        }

        // Fuzzy-match the rest, then order by score, name, and match
        // tightness (fewer matched indices first).
        let mut scored: Vec<(String, Vec<usize>, i32)> = others
            .into_iter()
            .filter_map(|name| {
                fuzzy_match(name, &self.query)
                    .map(|(indices, score)| (name.to_string(), indices, score))
            })
            .collect();
        scored.sort_by(|(a_name, a_idx, a_score), (b_name, b_idx, b_score)| {
            a_score
                .cmp(b_score)
                .then_with(|| a_name.cmp(b_name))
                .then_with(|| a_idx.len().cmp(&b_idx.len()))
        });
        for (name, indices, _score) in scored {
            // `others` already excludes the current model; keep the
            // defensive check from the original flow.
            if name != self.current_model {
                rows.push(DisplayRow::Model {
                    name,
                    match_indices: Some(indices),
                    is_current: false,
                });
            }
        }
        rows
    }
}
/// Row in the model popup.
///
/// Single-variant enum kept as an enum so additional row kinds (headers,
/// separators, …) can be added without reshaping the render path.
enum DisplayRow {
    Model {
        /// Model name shown in the row.
        name: String,
        /// Indices to bold (char positions from the fuzzy matcher);
        /// `None` when no filter is active.
        match_indices: Option<Vec<usize>>,
        /// True when this row is the currently configured model.
        is_current: bool,
    },
}
impl WidgetRef for &ModelSelectionPopup {
    /// Render the popup as a single-column table with a left border.
    ///
    /// The selected row is yellow/bold, the current model cyan, and fuzzy
    /// match positions within a name are bolded. When the filter yields no
    /// rows, a dimmed "no matches" placeholder is shown instead.
    fn render_ref(&self, area: Rect, buf: &mut Buffer) {
        let display_rows = self.visible_rows();
        let mut table_rows: Vec<Row> = Vec::new();
        if display_rows.is_empty() {
            table_rows.push(Row::new(vec![Cell::from(Line::from(Span::styled(
                "no matches",
                Style::default().add_modifier(Modifier::ITALIC | Modifier::DIM),
            )))]));
        } else {
            for (row_idx, display_row) in display_rows.into_iter().take(MAX_RESULTS).enumerate() {
                let DisplayRow::Model {
                    name,
                    match_indices,
                    is_current,
                } = display_row;
                // Build the per-character spans, bolding fuzzy-matched
                // positions. Match indices are consumed in order via a
                // peekable iterator (they arrive sorted ascending).
                let spans: Vec<Span> = match match_indices.as_ref() {
                    Some(indices) => {
                        let mut pending = indices.iter().peekable();
                        name.chars()
                            .enumerate()
                            .map(|(char_idx, ch)| {
                                let bold =
                                    pending.peek().is_some_and(|next| **next == char_idx);
                                if bold {
                                    pending.next();
                                    Span::styled(
                                        ch.to_string(),
                                        Style::default().add_modifier(Modifier::BOLD),
                                    )
                                } else {
                                    Span::styled(ch.to_string(), Style::default())
                                }
                            })
                            .collect()
                    }
                    None => vec![Span::raw(name.clone())],
                };
                // Row-level styling: selection wins over "current model".
                let cell = Cell::from(Line::from(spans));
                let cell = if Some(row_idx) == self.selected_idx {
                    cell.style(
                        Style::default()
                            .fg(Color::Yellow)
                            .add_modifier(Modifier::BOLD),
                    )
                } else if is_current {
                    cell.style(Style::default().fg(Color::Cyan))
                } else {
                    cell
                };
                table_rows.push(Row::new(vec![cell]));
            }
        }
        let table = Table::new(table_rows, vec![Constraint::Percentage(100)])
            .block(
                Block::default()
                    .borders(Borders::LEFT)
                    .border_type(BorderType::QuadrantOutside)
                    .border_style(Style::default().fg(Color::DarkGray)),
            )
            .widths([Constraint::Percentage(100)]);
        table.render(area, buf);
    }
}

View File

@@ -6,6 +6,8 @@ use std::time::Duration;
use codex_core::codex_wrapper::CodexConversation;
use codex_core::codex_wrapper::init_codex;
use codex_core::config::Config;
use codex_core::config::ConfigToml;
use codex_core::openai_model_info::get_all_model_names;
use codex_core::protocol::AgentMessageDeltaEvent;
use codex_core::protocol::AgentMessageEvent;
use codex_core::protocol::AgentReasoningDeltaEvent;
@@ -64,6 +66,7 @@ pub(crate) struct ChatWidget<'a> {
// We wait for the final AgentMessage event and then emit the full text
// at once into scrollback so the history contains a single message.
answer_buffer: String,
new_session: bool,
running_commands: HashMap<String, RunningCommand>,
}
@@ -151,6 +154,7 @@ impl ChatWidget<'_> {
token_usage: TokenUsage::default(),
reasoning_buffer: String::new(),
answer_buffer: String::new(),
new_session: true,
running_commands: HashMap::new(),
}
}
@@ -224,8 +228,12 @@ impl ChatWidget<'_> {
EventMsg::SessionConfigured(event) => {
self.bottom_pane
.set_history_metadata(event.history_log_id, event.history_entry_count);
// Record session information at the top of the conversation.
self.add_to_history(HistoryCell::new_session_info(&self.config, event, true));
if self.new_session {
self.add_to_history(HistoryCell::new_session_info(&self.config, event, true));
self.new_session = false;
}
if let Some(user_message) = self.initial_user_message.take() {
// If the user provided an initial message, add it to the
@@ -504,6 +512,62 @@ impl ChatWidget<'_> {
&self.token_usage
}
/// Open the model selection view in the bottom pane.
///
/// The option list is assembled from the built-in model names, the
/// currently configured model (covers custom values not in the built-in
/// list), and any models referenced in `config.toml` (the top-level
/// `model` plus each profile's `model`, sorted alphabetically).
/// Duplicates are removed while preserving first-seen order so the popup
/// never lists the same model twice.
pub(crate) fn show_model_selector(&mut self) {
    use std::collections::HashSet;

    let current = self.config.model.clone();
    let mut options = get_all_model_names()
        .into_iter()
        .map(|s| s.to_string())
        .collect::<Vec<_>>();
    // Always include the currently configured model (covers custom values).
    options.push(current.clone());
    // Append any models found in config.toml profiles and top-level model.
    let config_path = self.config.codex_home.join("config.toml");
    if let Ok(contents) = std::fs::read_to_string(&config_path) {
        if let Ok(cfg) = toml::from_str::<ConfigToml>(&contents) {
            let mut config_models: Vec<String> = Vec::new();
            if let Some(m) = cfg.model {
                config_models.push(m);
            }
            for (_name, profile) in cfg.profiles.into_iter() {
                if let Some(m) = profile.model {
                    config_models.push(m);
                }
            }
            // Alphabetical ordering for config models.
            config_models.sort();
            options.extend(config_models);
        }
    }
    // Drop duplicates (e.g. the configured model is already a built-in, or
    // a profile repeats the top-level model) keeping each first occurrence.
    let mut seen: HashSet<String> = HashSet::new();
    options.retain(|m| seen.insert(m.clone()));
    self.bottom_pane.show_model_selector(&current, options);
}
/// Update the current model and reconfigure the running Codex session.
///
/// Updates the local config (so the UI reflects the new model), logs the
/// change to the conversation history when the model actually changed, and
/// resubmits a session-configuration op rebuilt from the config.
pub(crate) fn update_model_and_reconfigure(&mut self, model: String) {
    let model_changed = self.config.model != model;
    self.config.model = model.clone();
    // Only surface a history entry for a real change, not a re-selection.
    if model_changed {
        self.add_to_history(HistoryCell::new_background_event(format!(
            "Set model to {model}."
        )));
    }
    // Build the op from the config itself so newly added fields are picked
    // up automatically instead of drifting out of sync.
    let reconfigure_op = self
        .config
        .to_configure_session_op(None, self.config.user_instructions.clone());
    self.submit_op(reconfigure_op);
    self.request_redraw();
}
pub(crate) fn clear_token_usage(&mut self) {
self.token_usage = TokenUsage::default();
self.bottom_pane

View File

@@ -15,6 +15,7 @@ pub enum SlashCommand {
New,
Compact,
Diff,
Model,
Quit,
#[cfg(debug_assertions)]
TestApproval,
@@ -27,6 +28,7 @@ impl SlashCommand {
SlashCommand::New => "Start a new chat.",
SlashCommand::Compact => "Compact the chat history.",
SlashCommand::Quit => "Exit the application.",
SlashCommand::Model => "Select the model to use.",
SlashCommand::Diff => {
"Show git diff of the working directory (including untracked files)"
}