Compare commits


4 commits

Eric Traut · dd80e332c4 · 2026-02-05 23:54:57 -08:00
Removed the "remote_compaction" feature flag (#10840)
This feature is always on now

Eric Traut · f61226d32a · 2026-02-05 22:19:09 -08:00
Personality setting is no longer available in experimental menu (#10852)
This PR removes the inaccurate "Disable in /experimental." statement now
that the "personality" feature flag is no longer experimental.
This addresses #10850

Eric Traut · e5c1a2d6fb · 2026-02-05 20:24:16 -08:00
Log an event (info only) when we receive a file watcher event (#10843)

Ahmed Ibrahim · 048e0f3888 · 2026-02-05 19:18:08 -08:00
Gate app tooltips to macOS (#10784)
- Gate app promo tips to macOS and use non-app copy elsewhere.
14 changed files with 70 additions and 292 deletions

View File

@@ -29,7 +29,6 @@ use codex_app_server_protocol::TurnStartParams;
 use codex_app_server_protocol::TurnStartResponse;
 use codex_app_server_protocol::UserInput as V2UserInput;
 use codex_core::auth::AuthCredentialsStoreMode;
-use codex_core::features::Feature;
 use codex_protocol::models::ContentItem;
 use codex_protocol::models::ResponseItem;
 use core_test_support::responses;
@@ -143,12 +142,10 @@ async fn auto_compaction_remote_emits_started_and_completed_items() -> Result<()>
         .await;
     let codex_home = TempDir::new()?;
-    let mut features = BTreeMap::default();
-    features.insert(Feature::RemoteCompaction, true);
     write_mock_responses_config_toml(
         codex_home.path(),
         &server.uri(),
-        &features,
+        &BTreeMap::default(),
         AUTO_COMPACT_LIMIT,
         Some(true),
         "openai",

View File

@@ -199,9 +199,6 @@
       "powershell_utf8": {
         "type": "boolean"
       },
-      "remote_compaction": {
-        "type": "boolean"
-      },
       "remote_models": {
         "type": "boolean"
       },
@@ -1232,9 +1229,6 @@
       "powershell_utf8": {
         "type": "boolean"
      },
-      "remote_compaction": {
-        "type": "boolean"
-      },
       "remote_models": {
         "type": "boolean"
       },

View File

@@ -3877,7 +3877,7 @@ pub(crate) async fn run_turn(
 }
 async fn run_auto_compact(sess: &Arc<Session>, turn_context: &Arc<TurnContext>) {
-    if should_use_remote_compact_task(sess.as_ref(), &turn_context.provider) {
+    if should_use_remote_compact_task(&turn_context.provider) {
         run_inline_remote_auto_compact_task(Arc::clone(sess), Arc::clone(turn_context)).await;
     } else {
         run_inline_auto_compact_task(Arc::clone(sess), Arc::clone(turn_context)).await;

View File

@@ -9,7 +9,6 @@ use crate::codex::TurnContext;
 use crate::codex::get_last_assistant_message_from_turn;
 use crate::error::CodexErr;
 use crate::error::Result as CodexResult;
-use crate::features::Feature;
 use crate::protocol::CompactedItem;
 use crate::protocol::EventMsg;
 use crate::protocol::TurnContextItem;
@@ -34,11 +33,8 @@ pub const SUMMARIZATION_PROMPT: &str = include_str!("../templates/compact/prompt.md");
 pub const SUMMARY_PREFIX: &str = include_str!("../templates/compact/summary_prefix.md");
 const COMPACT_USER_MESSAGE_MAX_TOKENS: usize = 20_000;
-pub(crate) fn should_use_remote_compact_task(
-    session: &Session,
-    provider: &ModelProviderInfo,
-) -> bool {
-    provider.is_openai() && session.enabled(Feature::RemoteCompaction)
+pub(crate) fn should_use_remote_compact_task(provider: &ModelProviderInfo) -> bool {
+    provider.is_openai()
 }
 pub(crate) async fn run_inline_auto_compact_task(

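With the flag gone, the remote-versus-inline choice above reduces to a provider check. A self-contained sketch of that dispatch shape, using stand-in types rather than the repo's actual Session/TurnContext machinery:

// Illustrative stand-in for ModelProviderInfo; only the is_openai()
// check matters for the compaction dispatch.
struct Provider {
    name: &'static str,
}

impl Provider {
    fn is_openai(&self) -> bool {
        self.name == "openai"
    }
}

// Mirrors the simplified predicate: provider-only, no feature flag.
fn should_use_remote_compact_task(provider: &Provider) -> bool {
    provider.is_openai()
}

fn run_auto_compact(provider: &Provider) {
    if should_use_remote_compact_task(provider) {
        println!("running remote compaction task");
    } else {
        println!("running inline summarization task");
    }
}

fn main() {
    run_auto_compact(&Provider { name: "openai" }); // remote path
    run_auto_compact(&Provider { name: "oss" }); // inline path
}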
View File

@@ -774,7 +774,7 @@ unified_exec = true
     service
         .write_value(ConfigValueWriteParams {
             file_path: Some(tmp.path().join(CONFIG_TOML_FILE).display().to_string()),
-            key_path: "features.remote_compaction".to_string(),
+            key_path: "features.remote_models".to_string(),
             value: serde_json::json!(true),
             merge_strategy: MergeStrategy::Replace,
             expected_version: None,
@@ -794,7 +794,7 @@ hide_full_access_warning = true
 [features]
 unified_exec = true
-remote_compaction = true
+remote_models = true
 "#;
     assert_eq!(updated, expected);
     Ok(())

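The write_value call in this test addresses a nested TOML key through a dotted key_path ("features.remote_models") with a Replace merge strategy. A rough sketch of the mechanics such a dotted-path replace implies, using the toml crate; this is an illustration under assumptions, not the service's actual implementation (which also tracks expected_version and other merge strategies):

use toml::Table;
use toml::Value;

// Walk (or create) nested tables along a dotted key path, then replace
// the leaf with the new value.
fn write_value(root: &mut Table, key_path: &str, value: Value) {
    let mut parts: Vec<&str> = key_path.split('.').collect();
    let leaf = parts.pop().expect("key_path must not be empty");
    let mut table = root;
    for part in parts {
        table = table
            .entry(part.to_string())
            .or_insert_with(|| Value::Table(Table::new()))
            .as_table_mut()
            .expect("intermediate key is not a table");
    }
    table.insert(leaf.to_string(), value);
}

fn main() {
    let mut root: Table = "[features]\nunified_exec = true".parse().unwrap();
    write_value(&mut root, "features.remote_models", Value::Boolean(true));
    // Prints the features table with both keys set.
    println!("{}", toml::to_string(&root).unwrap());
}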
View File

@@ -97,8 +97,6 @@ pub enum Feature {
     WindowsSandbox,
     /// Use the elevated Windows sandbox pipeline (setup + runner).
     WindowsSandboxElevated,
-    /// Remote compaction enabled (only for ChatGPT auth)
-    RemoteCompaction,
     /// Refresh remote models and emit AppReady once the list is available.
     RemoteModels,
     /// Experimental shell snapshotting.
@@ -499,12 +497,6 @@
         stage: Stage::UnderDevelopment,
         default_enabled: false,
     },
-    FeatureSpec {
-        id: Feature::RemoteCompaction,
-        key: "remote_compaction",
-        stage: Stage::UnderDevelopment,
-        default_enabled: true,
-    },
     FeatureSpec {
         id: Feature::RemoteModels,
         key: "remote_models",

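For orientation, this file pairs every flag with two entries: a Feature enum variant and a FeatureSpec row in the static FEATURES table, so retiring remote_compaction means deleting both. A reduced sketch of that registry shape (field names are taken from the diff; the Stage contents and the lookup helper are assumptions):

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Feature {
    RemoteModels,
    ShellSnapshots,
}

#[derive(Debug)]
enum Stage {
    UnderDevelopment,
}

struct FeatureSpec {
    id: Feature,
    key: &'static str,
    stage: Stage,
    default_enabled: bool,
}

const FEATURES: &[FeatureSpec] = &[
    FeatureSpec {
        id: Feature::RemoteModels,
        key: "remote_models",
        stage: Stage::UnderDevelopment,
        default_enabled: false,
    },
    FeatureSpec {
        id: Feature::ShellSnapshots,
        key: "shell_snapshots",
        stage: Stage::UnderDevelopment,
        default_enabled: false,
    },
];

// Hypothetical lookup helper: resolve a config key like "remote_models"
// back to its spec row.
fn spec_for_key(key: &str) -> Option<&'static FeatureSpec> {
    FEATURES.iter().find(|spec| spec.key == key)
}

fn main() {
    let spec = spec_for_key("remote_models").expect("known feature");
    println!("{:?} ({:?}) default_enabled={}", spec.id, spec.stage, spec.default_enabled);
}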
View File

@@ -19,6 +19,7 @@ use tokio::sync::broadcast;
 use tokio::sync::mpsc;
 use tokio::time::Instant;
 use tokio::time::sleep_until;
+use tracing::info;
 use tracing::warn;
 use crate::config::Config;
@@ -162,6 +163,12 @@ impl FileWatcher {
             res = raw_rx.recv() => {
                 match res {
                     Some(Ok(event)) => {
+                        info!(
+                            event_kind = ?event.kind,
+                            event_paths = ?event.paths,
+                            event_attrs = ?event.attrs,
+                            "file watcher received filesystem event"
+                        );
                         let skills_paths = classify_event(&event, &state);
                         let now = Instant::now();
                         skills.add(skills_paths);

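The added log line uses tracing's structured-field syntax: a leading ? records the field via its Debug implementation rather than Display. A minimal runnable sketch of the same pattern; the event type here is a stand-in for the watcher's real event, and tracing-subscriber is assumed as the subscriber:

use tracing::info;

// Stand-in for the watcher's event type; the real one also carries
// kind, paths, and attrs, all logged via Debug below.
#[derive(Debug)]
struct FsEvent {
    kind: &'static str,
    paths: Vec<&'static str>,
}

fn main() {
    // Install a default formatter so the info! below is visible.
    tracing_subscriber::fmt::init();

    let event = FsEvent {
        kind: "Modify(Data)",
        paths: vec!["/home/user/.codex/skills/notes.md"],
    };

    // `?field` captures the value with Debug, matching the new log line.
    info!(
        event_kind = ?event.kind,
        event_paths = ?event.paths,
        "file watcher received filesystem event"
    );
}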
View File

@@ -25,7 +25,7 @@ impl SessionTask for CompactTask {
         _cancellation_token: CancellationToken,
     ) -> Option<String> {
         let session = session.clone_session();
-        if crate::compact::should_use_remote_compact_task(session.as_ref(), &ctx.provider) {
+        if crate::compact::should_use_remote_compact_task(&ctx.provider) {
             let _ = session.services.otel_manager.counter(
                 "codex.task.compact",
                 1,

View File

@@ -5,7 +5,6 @@ use codex_core::built_in_model_providers;
 use codex_core::compact::SUMMARIZATION_PROMPT;
 use codex_core::compact::SUMMARY_PREFIX;
 use codex_core::config::Config;
-use codex_core::features::Feature;
 use codex_core::protocol::AskForApproval;
 use codex_core::protocol::EventMsg;
 use codex_core::protocol::ItemCompletedEvent;
@@ -1464,7 +1463,6 @@ async fn auto_compact_runs_after_resume_when_token_usage_is_over_limit() {
     let mut builder = test_codex().with_config(move |config| {
         set_test_compact_prompt(config);
         config.model_auto_compact_token_limit = Some(limit);
-        config.features.enable(Feature::RemoteCompaction);
     });
     let initial = builder.build(&server).await.unwrap();
     let home = initial.home.clone();
@@ -1493,7 +1491,6 @@ async fn auto_compact_runs_after_resume_when_token_usage_is_over_limit() {
     let mut resume_builder = test_codex().with_config(move |config| {
         set_test_compact_prompt(config);
         config.model_auto_compact_token_limit = Some(limit);
-        config.features.enable(Feature::RemoteCompaction);
     });
     let resumed = resume_builder
         .resume(&server, home, rollout_path)
@@ -2290,7 +2287,6 @@ async fn auto_compact_counts_encrypted_reasoning_before_last_user() {
         .with_config(|config| {
             set_test_compact_prompt(config);
             config.model_auto_compact_token_limit = Some(300);
-            config.features.enable(Feature::RemoteCompaction);
         })
         .build(&server)
         .await
@@ -2411,7 +2407,6 @@ async fn auto_compact_runs_when_reasoning_header_clears_between_turns() {
         .with_config(|config| {
             set_test_compact_prompt(config);
             config.model_auto_compact_token_limit = Some(300);
-            config.features.enable(Feature::RemoteCompaction);
         })
         .build(&server)
         .await

View File

@@ -4,7 +4,6 @@ use std::fs;
 use anyhow::Result;
 use codex_core::CodexAuth;
-use codex_core::features::Feature;
 use codex_core::protocol::EventMsg;
 use codex_core::protocol::ItemCompletedEvent;
 use codex_core::protocol::ItemStartedEvent;
@@ -45,11 +44,7 @@ async fn remote_compact_replaces_history_for_followups() -> Result<()> {
     skip_if_no_network!(Ok(()));
     let harness = TestCodexHarness::with_builder(
-        test_codex()
-            .with_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing())
-            .with_config(|config| {
-                config.features.enable(Feature::RemoteCompaction);
-            }),
+        test_codex().with_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing()),
     )
     .await?;
     let codex = harness.test().codex.clone();
@@ -166,11 +161,7 @@ async fn remote_compact_runs_automatically() -> Result<()> {
     skip_if_no_network!(Ok(()));
     let harness = TestCodexHarness::with_builder(
-        test_codex()
-            .with_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing())
-            .with_config(|config| {
-                config.features.enable(Feature::RemoteCompaction);
-            }),
+        test_codex().with_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing()),
     )
     .await?;
     let codex = harness.test().codex.clone();
@@ -253,7 +244,6 @@ async fn remote_compact_trims_function_call_history_to_fit_context_window() -> Result<()> {
         test_codex()
             .with_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing())
             .with_config(|config| {
-                config.features.enable(Feature::RemoteCompaction);
                 config.model_context_window = Some(2_000);
             }),
     )
@@ -384,7 +374,6 @@ async fn remote_compact_trim_estimate_uses_session_base_instructions() -> Result<()> {
         test_codex()
             .with_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing())
             .with_config(|config| {
-                config.features.enable(Feature::RemoteCompaction);
                 config.model_context_window = Some(200_000);
             }),
     )
@@ -482,7 +471,6 @@ async fn remote_compact_trim_estimate_uses_session_base_instructions() -> Result<()> {
         .with_config({
             let override_base_instructions = override_base_instructions.clone();
             move |config| {
-                config.features.enable(Feature::RemoteCompaction);
                 config.model_context_window = Some(override_context_window);
                 config.base_instructions = Some(override_base_instructions);
             }
@@ -575,11 +563,7 @@ async fn remote_manual_compact_emits_context_compaction_items() -> Result<()> {
     skip_if_no_network!(Ok(()));
     let harness = TestCodexHarness::with_builder(
-        test_codex()
-            .with_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing())
-            .with_config(|config| {
-                config.features.enable(Feature::RemoteCompaction);
-            }),
+        test_codex().with_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing()),
     )
     .await?;
     let codex = harness.test().codex.clone();
@@ -671,11 +655,7 @@ async fn remote_compact_persists_replacement_history_in_rollout() -> Result<()>
     skip_if_no_network!(Ok(()));
     let harness = TestCodexHarness::with_builder(
-        test_codex()
-            .with_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing())
-            .with_config(|config| {
-                config.features.enable(Feature::RemoteCompaction);
-            }),
+        test_codex().with_auth(CodexAuth::create_dummy_chatgpt_auth_for_testing()),
     )
     .await?;
     let codex = harness.test().codex.clone();

View File

@@ -1643,7 +1643,16 @@ impl ChatWidget {
         }
         if let Some(combined) = self.drain_queued_messages_for_restore() {
-            self.restore_user_message_to_composer(combined);
+            let combined_local_image_paths = combined
+                .local_images
+                .iter()
+                .map(|img| img.path.clone())
+                .collect();
+            self.bottom_pane.set_composer_text(
+                combined.text,
+                combined.text_elements,
+                combined_local_image_paths,
+            );
             self.refresh_queued_user_messages();
         }
@@ -1706,18 +1715,6 @@ impl ChatWidget {
         Some(combined)
     }
-    fn restore_user_message_to_composer(&mut self, user_message: UserMessage) {
-        let UserMessage {
-            text,
-            local_images,
-            text_elements,
-            mention_paths: _,
-        } = user_message;
-        let local_image_paths = local_images.into_iter().map(|img| img.path).collect();
-        self.bottom_pane
-            .set_composer_text(text, text_elements, local_image_paths);
-    }
     fn on_plan_update(&mut self, update: UpdatePlanArgs) {
         self.saw_plan_update_this_turn = true;
         self.add_to_history(history_cell::new_plan_update(update));
@@ -3006,7 +3003,16 @@ impl ChatWidget {
             } if !self.queued_user_messages.is_empty() => {
                 // Prefer the most recently queued item.
                 if let Some(user_message) = self.queued_user_messages.pop_back() {
-                    self.restore_user_message_to_composer(user_message);
+                    let local_image_paths = user_message
+                        .local_images
+                        .iter()
+                        .map(|img| img.path.clone())
+                        .collect();
+                    self.bottom_pane.set_composer_text(
+                        user_message.text,
+                        user_message.text_elements,
+                        local_image_paths,
+                    );
                     self.refresh_queued_user_messages();
                     self.request_redraw();
                 }
@@ -3024,8 +3030,8 @@ impl ChatWidget {
             text_elements,
             mention_paths: self.bottom_pane.take_mention_paths(),
         };
-        if self.is_session_configured() && !self.is_plan_streaming_in_tui() {
-            // Submitted is only emitted when steer is enabled.
+        if self.is_session_configured() {
+            // Submitted is only emitted when steer is enabled (Enter sends immediately).
             // Reset any reasoning header only when we are actually submitting a turn.
             self.reasoning_buffer.clear();
             self.full_reasoning_buffer.clear();
@@ -4566,9 +4572,7 @@ impl ChatWidget {
         let mut header = ColumnRenderable::new();
         header.push(Line::from("Select Personality".bold()));
-        header.push(Line::from(
-            "Choose a communication style for Codex. Disable in /experimental.".dim(),
-        ));
+        header.push(Line::from("Choose a communication style for Codex.".dim()));
         self.bottom_pane.show_selection_view(SelectionViewParams {
             header: Box::new(header),
@@ -6411,10 +6415,6 @@ impl ChatWidget {
         self.bottom_pane.is_task_running() || self.is_review_mode
     }
-    fn is_plan_streaming_in_tui(&self) -> bool {
-        self.plan_stream_controller.is_some()
-    }
     pub(crate) fn composer_is_empty(&self) -> bool {
         self.bottom_pane.composer_is_empty()
     }
@@ -6424,27 +6424,8 @@ impl ChatWidget {
         text: String,
         collaboration_mode: CollaborationModeMask,
     ) {
-        if self.agent_turn_running
-            && self.active_collaboration_mask.as_ref() != Some(&collaboration_mode)
-        {
-            self.add_error_message(
-                "Cannot switch collaboration mode while a turn is running.".to_string(),
-            );
-            return;
-        }
         self.set_collaboration_mask(collaboration_mode);
-        let should_queue = self.is_plan_streaming_in_tui();
-        let user_message = UserMessage {
-            text,
-            local_images: Vec::new(),
-            text_elements: Vec::new(),
-            mention_paths: HashMap::new(),
-        };
-        if should_queue {
-            self.queue_user_message(user_message);
-        } else {
-            self.submit_user_message(user_message);
-        }
+        self.submit_user_message(text.into());
     }
     /// True when the UI is in the regular composer state with no running task,

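Note the new call site passes text.into() where a UserMessage is expected, implying a From<String> conversion that leaves the non-text fields empty. A sketch of that shape; the From impl and the simplified field types are assumptions (in the diff, local_images items expose a .path):

use std::collections::HashMap;
use std::path::PathBuf;

#[derive(Default)]
struct UserMessage {
    text: String,
    local_images: Vec<PathBuf>, // simplified: the real items carry a .path
    text_elements: Vec<String>, // simplified element type
    mention_paths: HashMap<String, PathBuf>,
}

// Assumed conversion backing `text.into()`: plain text, everything
// else defaulted.
impl From<String> for UserMessage {
    fn from(text: String) -> Self {
        Self {
            text,
            ..Self::default()
        }
    }
}

fn main() {
    let msg: UserMessage = "Implement the plan.".to_string().into();
    assert_eq!(msg.text, "Implement the plan.");
    assert!(msg.local_images.is_empty());
    assert!(msg.text_elements.is_empty());
    assert!(msg.mention_paths.is_empty());
}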
View File

@@ -3,7 +3,7 @@ source: tui/src/chatwidget/tests.rs
 expression: popup
 ---
 Select Personality
-Choose a communication style for Codex. Disable in /experimental.
+Choose a communication style for Codex.
 1. Friendly Warm, collaborative, and helpful.
 2. Pragmatic (current) Concise, task-focused, and direct.

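The block above is an insta snapshot file; its source:/expression: header records the originating test module and the asserted expression (popup), so the copy change only required re-accepting this snapshot. A sketch of the kind of test that produces such a file, with a hypothetical render helper standing in for the real popup rendering:

// Hypothetical stand-in for the TUI popup rendering under test.
fn render_personality_popup() -> String {
    [
        "Select Personality",
        "Choose a communication style for Codex.",
        "1. Friendly Warm, collaborative, and helpful.",
        "2. Pragmatic (current) Concise, task-focused, and direct.",
    ]
    .join("\n")
}

#[test]
fn personality_popup_snapshot() {
    let popup = render_personality_popup();
    // Assumes the insta crate as a dev-dependency; on first run this
    // writes a .snap file with the same source/expression header seen above.
    insta::assert_snapshot!(popup);
}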
View File

@@ -527,54 +527,6 @@ async fn interrupted_turn_restores_queued_messages_with_images_and_elements() {
     );
 }
-#[tokio::test]
-async fn interrupted_turn_restore_keeps_active_mode_for_resubmission() {
-    let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
-    chat.thread_id = Some(ThreadId::new());
-    chat.set_feature_enabled(Feature::CollaborationModes, true);
-    let plan_mask = collaboration_modes::plan_mask(chat.models_manager.as_ref())
-        .expect("expected plan collaboration mode");
-    let expected_mode = plan_mask
-        .mode
-        .expect("expected mode kind on plan collaboration mode");
-    chat.set_collaboration_mask(plan_mask);
-    chat.on_task_started();
-    chat.queued_user_messages.push_back(UserMessage {
-        text: "Implement the plan.".to_string(),
-        local_images: Vec::new(),
-        text_elements: Vec::new(),
-        mention_paths: HashMap::new(),
-    });
-    chat.refresh_queued_user_messages();
-    chat.handle_codex_event(Event {
-        id: "interrupt".into(),
-        msg: EventMsg::TurnAborted(codex_core::protocol::TurnAbortedEvent {
-            reason: TurnAbortReason::Interrupted,
-        }),
-    });
-    assert_eq!(chat.bottom_pane.composer_text(), "Implement the plan.");
-    assert!(chat.queued_user_messages.is_empty());
-    assert_eq!(chat.active_collaboration_mode_kind(), expected_mode);
-    chat.handle_key_event(KeyEvent::from(KeyCode::Enter));
-    match next_submit_op(&mut op_rx) {
-        Op::UserTurn {
-            collaboration_mode: Some(CollaborationMode { mode, .. }),
-            personality: None,
-            ..
-        } => assert_eq!(mode, expected_mode),
-        other => {
-            panic!("expected Op::UserTurn with active mode, got {other:?}")
-        }
-    }
-    assert_eq!(chat.active_collaboration_mode_kind(), expected_mode);
-}
 #[tokio::test]
 async fn remap_placeholders_uses_attachment_labels() {
     let placeholder_one = "[Image #1]";
@@ -1406,97 +1358,6 @@ async fn submit_user_message_with_mode_sets_coding_collaboration_mode() {
     }
 }
-#[tokio::test]
-async fn submit_user_message_with_mode_errors_when_mode_changes_during_running_turn() {
-    let (mut chat, mut rx, mut op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
-    chat.thread_id = Some(ThreadId::new());
-    chat.set_feature_enabled(Feature::CollaborationModes, true);
-    let plan_mask =
-        collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
-            .expect("expected plan collaboration mask");
-    chat.set_collaboration_mask(plan_mask);
-    chat.on_task_started();
-    let default_mode = collaboration_modes::default_mask(chat.models_manager.as_ref())
-        .expect("expected default collaboration mode");
-    chat.submit_user_message_with_mode("Implement the plan.".to_string(), default_mode);
-    assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Plan);
-    assert!(chat.queued_user_messages.is_empty());
-    assert_matches!(op_rx.try_recv(), Err(TryRecvError::Empty));
-    let rendered = drain_insert_history(&mut rx)
-        .iter()
-        .map(|lines| lines_to_single_string(lines))
-        .collect::<Vec<_>>()
-        .join("\n");
-    assert!(
-        rendered.contains("Cannot switch collaboration mode while a turn is running."),
-        "expected running-turn error message, got: {rendered:?}"
-    );
-}
-#[tokio::test]
-async fn submit_user_message_with_mode_allows_same_mode_during_running_turn() {
-    let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
-    chat.thread_id = Some(ThreadId::new());
-    chat.set_feature_enabled(Feature::CollaborationModes, true);
-    let plan_mask =
-        collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
-            .expect("expected plan collaboration mask");
-    chat.set_collaboration_mask(plan_mask.clone());
-    chat.on_task_started();
-    chat.submit_user_message_with_mode("Continue planning.".to_string(), plan_mask);
-    assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Plan);
-    assert!(chat.queued_user_messages.is_empty());
-    match next_submit_op(&mut op_rx) {
-        Op::UserTurn {
-            collaboration_mode:
-                Some(CollaborationMode {
-                    mode: ModeKind::Plan,
-                    ..
-                }),
-            personality: None,
-            ..
-        } => {}
-        other => {
-            panic!("expected Op::UserTurn with plan collab mode, got {other:?}")
-        }
-    }
-}
-#[tokio::test]
-async fn submit_user_message_with_mode_submits_when_plan_stream_is_not_active() {
-    let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
-    chat.thread_id = Some(ThreadId::new());
-    chat.set_feature_enabled(Feature::CollaborationModes, true);
-    let plan_mask =
-        collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
-            .expect("expected plan collaboration mask");
-    chat.set_collaboration_mask(plan_mask);
-    let default_mode = collaboration_modes::default_mask(chat.models_manager.as_ref())
-        .expect("expected default collaboration mode");
-    let expected_mode = default_mode
-        .mode
-        .expect("expected default collaboration mode kind");
-    chat.submit_user_message_with_mode("Implement the plan.".to_string(), default_mode);
-    assert_eq!(chat.active_collaboration_mode_kind(), expected_mode);
-    assert!(chat.queued_user_messages.is_empty());
-    match next_submit_op(&mut op_rx) {
-        Op::UserTurn {
-            collaboration_mode: Some(CollaborationMode { mode, .. }),
-            personality: None,
-            ..
-        } => assert_eq!(mode, expected_mode),
-        other => {
-            panic!("expected Op::UserTurn with default collab mode, got {other:?}")
-        }
-    }
-}
 #[tokio::test]
 async fn plan_implementation_popup_skips_replayed_turn_complete() {
     let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
@@ -2125,55 +1986,6 @@ async fn unified_exec_begin_restores_working_status_snapshot() {
     );
 }
-#[tokio::test]
-async fn steer_enter_queues_while_plan_stream_is_active() {
-    let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(None).await;
-    chat.thread_id = Some(ThreadId::new());
-    chat.set_feature_enabled(Feature::CollaborationModes, true);
-    let plan_mask =
-        collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
-            .expect("expected plan collaboration mask");
-    chat.set_collaboration_mask(plan_mask);
-    chat.on_task_started();
-    chat.on_plan_delta("- Step 1".to_string());
-    chat.bottom_pane
-        .set_composer_text("queued submission".to_string(), Vec::new(), Vec::new());
-    chat.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
-    assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Plan);
-    assert_eq!(chat.queued_user_messages.len(), 1);
-    assert_eq!(
-        chat.queued_user_messages.front().unwrap().text,
-        "queued submission"
-    );
-    assert_matches!(op_rx.try_recv(), Err(TryRecvError::Empty));
-}
-#[tokio::test]
-async fn steer_enter_submits_when_plan_stream_is_not_active() {
-    let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(None).await;
-    chat.thread_id = Some(ThreadId::new());
-    chat.set_feature_enabled(Feature::CollaborationModes, true);
-    let plan_mask =
-        collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
-            .expect("expected plan collaboration mask");
-    chat.set_collaboration_mask(plan_mask);
-    chat.on_task_started();
-    chat.bottom_pane
-        .set_composer_text("submitted immediately".to_string(), Vec::new(), Vec::new());
-    chat.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
-    assert!(chat.queued_user_messages.is_empty());
-    match next_submit_op(&mut op_rx) {
-        Op::UserTurn {
-            personality: None, ..
-        } => {}
-        other => panic!("expected Op::UserTurn, got {other:?}"),
-    }
-}
 #[tokio::test]
 async fn ctrl_c_shutdown_works_with_caps_lock() {
     let (mut chat, mut rx, _op_rx) = make_chatwidget_manual(None).await;

View File

@@ -6,9 +6,13 @@ use rand::Rng;
 const ANNOUNCEMENT_TIP_URL: &str =
     "https://raw.githubusercontent.com/openai/codex/main/announcement_tip.toml";
+const IS_MACOS: bool = cfg!(target_os = "macos");
 const PAID_TOOLTIP: &str = "*New* Try the **Codex App** with 2x rate limits until *April 2nd*. Run 'codex app' or visit https://chatgpt.com/codex";
+const PAID_TOOLTIP_NON_MAC: &str = "*New* 2x rate limits until *April 2nd*.";
 const OTHER_TOOLTIP: &str =
     "*New* Build faster with the **Codex App**. Run 'codex app' or visit https://chatgpt.com/codex";
+const OTHER_TOOLTIP_NON_MAC: &str = "*New* Build faster with Codex.";
 const FREE_GO_TOOLTIP: &str =
     "*New* Codex is included in your plan for free through *March 2nd* lets build together.";
@@ -18,7 +22,15 @@ lazy_static! {
     static ref TOOLTIPS: Vec<&'static str> = RAW_TOOLTIPS
         .lines()
         .map(str::trim)
-        .filter(|line| !line.is_empty() && !line.starts_with('#'))
+        .filter(|line| {
+            if line.is_empty() || line.starts_with('#') {
+                return false;
+            }
+            if !IS_MACOS && line.contains("codex app") {
+                return false;
+            }
+            true
+        })
         .collect();
     static ref ALL_TOOLTIPS: Vec<&'static str> = {
         let mut tips = Vec::new();
@@ -51,12 +63,24 @@ pub(crate) fn get_tooltip(plan: Option<PlanType>) -> Option<String> {
         | Some(PlanType::Team)
         | Some(PlanType::Enterprise)
         | Some(PlanType::Pro) => {
-            return Some(PAID_TOOLTIP.to_string());
+            let tooltip = if IS_MACOS {
+                PAID_TOOLTIP
+            } else {
+                PAID_TOOLTIP_NON_MAC
+            };
+            return Some(tooltip.to_string());
         }
         Some(PlanType::Go) | Some(PlanType::Free) => {
            return Some(FREE_GO_TOOLTIP.to_string());
        }
-        _ => return Some(OTHER_TOOLTIP.to_string()),
+        _ => {
+            let tooltip = if IS_MACOS {
+                OTHER_TOOLTIP
+            } else {
+                OTHER_TOOLTIP_NON_MAC
+            };
+            return Some(tooltip.to_string());
+        }
     }
 }
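
The gating above leans on cfg!, which expands to a compile-time boolean constant: both tooltip variants are compiled and type-checked on every target, and the selection is an ordinary branch. A minimal sketch of the pattern with shortened strings:

const IS_MACOS: bool = cfg!(target_os = "macos");

const PAID_TOOLTIP: &str = "Try the Codex App (macOS copy)";
const PAID_TOOLTIP_NON_MAC: &str = "2x rate limits (non-app copy)";

fn paid_tooltip() -> &'static str {
    // Unlike #[cfg(...)], which strips code per-target, cfg!() keeps
    // both arms in the build and picks one at runtime.
    if IS_MACOS { PAID_TOOLTIP } else { PAID_TOOLTIP_NON_MAC }
}

fn main() {
    println!("{}", paid_tooltip());
}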