Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
69 changes: 21 additions & 48 deletions codex-rs/core/src/openai_models/model_presets.rs
Original file line number Diff line number Diff line change
Expand Up @@ -65,12 +65,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
},
],
is_default: false,
upgrade: Some(ModelUpgrade {
id: "gpt-5.2-codex".to_string(),
reasoning_effort_mapping: None,
migration_config_key: "gpt-5.2-codex".to_string(),
model_link: Some("https://openai.com/index/introducing-gpt-5-2-codex".to_string()),
}),
upgrade: Some(gpt_52_codex_upgrade()),
show_in_picker: true,
supported_in_api: true,
},
Expand All @@ -92,12 +87,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
},
],
is_default: false,
upgrade: Some(ModelUpgrade {
id: "gpt-5.2-codex".to_string(),
reasoning_effort_mapping: None,
migration_config_key: "gpt-5.2-codex".to_string(),
model_link: Some("https://openai.com/index/introducing-gpt-5-2-codex".to_string()),
}),
upgrade: Some(gpt_52_codex_upgrade()),
show_in_picker: true,
supported_in_api: true,
},
Expand Down Expand Up @@ -126,12 +116,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
},
],
is_default: false,
upgrade: Some(ModelUpgrade {
id: "gpt-5.2-codex".to_string(),
reasoning_effort_mapping: None,
migration_config_key: "gpt-5.2-codex".to_string(),
model_link: Some("https://openai.com/index/introducing-gpt-5-2-codex".to_string()),
}),
upgrade: Some(gpt_52_codex_upgrade()),
show_in_picker: true,
supported_in_api: true,
},
Expand All @@ -157,12 +142,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
},
],
is_default: false,
upgrade: Some(ModelUpgrade {
id: "gpt-5.2-codex".to_string(),
reasoning_effort_mapping: None,
migration_config_key: "gpt-5.2-codex".to_string(),
model_link: Some("https://openai.com/index/introducing-gpt-5-2-codex".to_string()),
}),
upgrade: Some(gpt_52_codex_upgrade()),
show_in_picker: false,
supported_in_api: true,
},
Expand All @@ -183,12 +163,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
},
],
is_default: false,
upgrade: Some(ModelUpgrade {
id: "gpt-5.1-codex-mini".to_string(),
reasoning_effort_mapping: None,
migration_config_key: HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG.to_string(),
model_link: Some("https://www.codex.com/models/caribou".to_string()),
}),
upgrade: Some(gpt_52_codex_upgrade()),
show_in_picker: false,
supported_in_api: true,
},
Expand All @@ -214,12 +189,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
},
],
is_default: false,
upgrade: Some(ModelUpgrade {
id: "caribou".to_string(),
reasoning_effort_mapping: None,
migration_config_key: "gpt-5.2-codex".to_string(),
model_link: Some("https://openai.com/index/introducing-gpt-5-2-codex".to_string()),
}),
upgrade: Some(gpt_52_codex_upgrade()),
show_in_picker: false,
supported_in_api: true,
},
Expand Down Expand Up @@ -248,12 +218,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
},
],
is_default: false,
upgrade: Some(ModelUpgrade {
id: "gpt-5.2-codex".to_string(),
reasoning_effort_mapping: None,
migration_config_key: "gpt-5.2-codex".to_string(),
model_link: Some("https://openai.com/index/introducing-gpt-5-2-codex".to_string()),
}),
upgrade: Some(gpt_52_codex_upgrade()),
show_in_picker: false,
supported_in_api: true,
},
Expand All @@ -278,18 +243,26 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
},
],
is_default: false,
upgrade: Some(ModelUpgrade {
id: "caribou".to_string(),
reasoning_effort_mapping: None,
migration_config_key: "caribou".to_string(),
model_link: Some("https://www.codex.com/models/caribou".to_string()),
}),
upgrade: Some(gpt_52_codex_upgrade()),
show_in_picker: false,
supported_in_api: true,
},
]
});

/// Builds the shared upgrade metadata that points a preset at gpt-5.2-codex.
///
/// Centralizing this avoids repeating the same `ModelUpgrade` literal in
/// every preset that migrates to gpt-5.2-codex.
fn gpt_52_codex_upgrade() -> ModelUpgrade {
    // User-facing copy shown in the migration prompt.
    let copy = "Codex is now powered by gpt-5.2-codex, our latest frontier agentic coding model. It is smarter and faster than its predecessors and capable of long-running project-scale work.";
    ModelUpgrade {
        id: String::from("gpt-5.2-codex"),
        reasoning_effort_mapping: None,
        migration_config_key: String::from("gpt-5.2-codex"),
        model_link: Some(String::from(
            "https://openai.com/index/introducing-gpt-5-2-codex",
        )),
        upgrade_copy: Some(copy.to_string()),
    }
}

pub(super) fn builtin_model_presets(_auth_mode: Option<AuthMode>) -> Vec<ModelPreset> {
PRESETS
.iter()
Expand Down
4 changes: 4 additions & 0 deletions codex-rs/core/tests/suite/list_models.rs
Original file line number Diff line number Diff line change
Expand Up @@ -184,6 +184,10 @@ fn gpt52_codex_upgrade() -> codex_protocol::openai_models::ModelUpgrade {
reasoning_effort_mapping: None,
migration_config_key: "gpt-5.2-codex".to_string(),
model_link: Some("https://openai.com/index/introducing-gpt-5-2-codex".to_string()),
upgrade_copy: Some(
"Codex is now powered by gpt-5.2-codex, our latest frontier agentic coding model. It is smarter and faster than its predecessors and capable of long-running project-scale work."
.to_string(),
),
}
}

Expand Down
2 changes: 2 additions & 0 deletions codex-rs/protocol/src/openai_models.rs
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ pub struct ModelUpgrade {
pub reasoning_effort_mapping: Option<HashMap<ReasoningEffort, ReasoningEffort>>,
pub migration_config_key: String,
pub model_link: Option<String>,
pub upgrade_copy: Option<String>,
}

/// Metadata describing a Codex-supported model.
Expand Down Expand Up @@ -219,6 +220,7 @@ impl From<ModelInfo> for ModelPreset {
migration_config_key: info.slug.clone(),
// todo(aibrahim): add the model link here.
model_link: None,
upgrade_copy: None,
}),
show_in_picker: info.visibility == ModelVisibility::List,
supported_in_api: info.supported_in_api,
Expand Down
3 changes: 3 additions & 0 deletions codex-rs/tui/src/app.rs
Original file line number Diff line number Diff line change
Expand Up @@ -195,6 +195,7 @@ async fn handle_model_migration_prompt_if_needed(
reasoning_effort_mapping,
migration_config_key,
model_link,
upgrade_copy,
}) = upgrade
{
if migration_prompt_hidden(config, migration_config_key.as_str()) {
Expand Down Expand Up @@ -227,6 +228,7 @@ async fn handle_model_migration_prompt_if_needed(
model,
&target_model,
model_link.clone(),
upgrade_copy.clone(),
heading_label,
target_description,
can_opt_out,
Expand Down Expand Up @@ -1398,6 +1400,7 @@ mod tests {
reasoning_effort_mapping: None,
migration_config_key: HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG.to_string(),
model_link: None,
upgrade_copy: None,
});
available.retain(|preset| preset.model != "gpt-5-codex");
available.push(current.clone());
Expand Down
49 changes: 34 additions & 15 deletions codex-rs/tui/src/model_migration.rs
Original file line number Diff line number Diff line change
Expand Up @@ -59,26 +59,36 @@ pub(crate) fn migration_copy_for_models(
current_model: &str,
target_model: &str,
model_link: Option<String>,
migration_copy: Option<String>,
target_display_name: String,
target_description: Option<String>,
can_opt_out: bool,
) -> ModelMigrationCopy {
let heading_text = Span::from(format!("Try {target_display_name}")).bold();
let description_line = target_description
.filter(|desc| !desc.is_empty())
.map(Line::from)
.unwrap_or_else(|| {
Line::from(format!(
"{target_display_name} is recommended for better performance and reliability."
))
});

let mut content = vec![
Line::from(format!(
let heading_text = Span::from(format!(
"Codex just got an upgrade. Introducing {target_display_name}."
))
.bold();
let description_line: Line<'static>;
if let Some(migration_copy) = &migration_copy {
description_line = Line::from(migration_copy.clone());
} else {
description_line = target_description
.filter(|desc| !desc.is_empty())
.map(Line::from)
.unwrap_or_else(|| {
Line::from(format!(
"{target_display_name} is recommended for better performance and reliability."
))
});
}

let mut content = vec![];
if migration_copy.is_none() {
content.push(Line::from(format!(
"We recommend switching from {current_model} to {target_model}."
)),
Line::from(""),
];
)));
content.push(Line::from(""));
}

if let Some(model_link) = model_link {
content.push(Line::from(vec![
Expand Down Expand Up @@ -364,6 +374,10 @@ mod tests {
"gpt-5.1-codex-mini",
"gpt-5.1-codex-max",
None,
Some(
"Upgrade to gpt-5.2-codex for the latest and greatest agentic coding model."
.to_string(),
),
"gpt-5.1-codex-max".to_string(),
Some("Codex-optimized flagship for deep and fast reasoning.".to_string()),
true,
Expand Down Expand Up @@ -391,6 +405,7 @@ mod tests {
"gpt-5",
"gpt-5.1",
Some("https://www.codex.com/models/gpt-5.1".to_string()),
None,
"gpt-5.1".to_string(),
Some("Broad world knowledge with strong general reasoning.".to_string()),
false,
Expand All @@ -416,6 +431,7 @@ mod tests {
"gpt-5-codex",
"gpt-5.1-codex-max",
Some("https://www.codex.com/models/gpt-5.1-codex-max".to_string()),
None,
"gpt-5.1-codex-max".to_string(),
Some("Codex-optimized flagship for deep and fast reasoning.".to_string()),
false,
Expand All @@ -441,6 +457,7 @@ mod tests {
"gpt-5-codex-mini",
"gpt-5.1-codex-mini",
Some("https://www.codex.com/models/gpt-5.1-codex-mini".to_string()),
None,
"gpt-5.1-codex-mini".to_string(),
Some("Optimized for codex. Cheaper, faster, but less capable.".to_string()),
false,
Expand All @@ -462,6 +479,7 @@ mod tests {
"gpt-old",
"gpt-new",
Some("https://www.codex.com/models/gpt-new".to_string()),
None,
"gpt-new".to_string(),
Some("Latest recommended model for better performance.".to_string()),
true,
Expand Down Expand Up @@ -489,6 +507,7 @@ mod tests {
"gpt-old",
"gpt-new",
Some("https://www.codex.com/models/gpt-new".to_string()),
None,
"gpt-new".to_string(),
Some("Latest recommended model for better performance.".to_string()),
true,
Expand Down
11 changes: 11 additions & 0 deletions codex-rs/tui/src/selection_list.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,15 @@ pub(crate) fn selection_option_row(
index: usize,
label: String,
is_selected: bool,
) -> Box<dyn Renderable> {
selection_option_row_with_dim(index, label, is_selected, false)
}

pub(crate) fn selection_option_row_with_dim(
index: usize,
label: String,
is_selected: bool,
dim: bool,
) -> Box<dyn Renderable> {
let prefix = if is_selected {
format!("› {}. ", index + 1)
Expand All @@ -19,6 +28,8 @@ pub(crate) fn selection_option_row(
};
let style = if is_selected {
Style::default().cyan()
} else if dim {
Style::default().dim()
} else {
Style::default()
};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,10 @@ source: tui/src/model_migration.rs
expression: terminal.backend()
---

> Try gpt-5.1-codex-max
> Codex just got an upgrade. Introducing gpt-5.1-codex-max.

We recommend switching from gpt-5.1-codex-mini to
gpt-5.1-codex-max.

Codex-optimized flagship for deep and fast reasoning.
Upgrade to gpt-5.2-codex for the latest and greatest
agentic coding model.

You can continue using gpt-5.1-codex-mini if you prefer.

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ source: tui/src/model_migration.rs
expression: terminal.backend()
---

> Try gpt-5.1-codex-max
> Codex just got an upgrade. Introducing gpt-5.1-codex-max.

We recommend switching from gpt-5-codex to
gpt-5.1-codex-max.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ source: tui/src/model_migration.rs
expression: terminal.backend()
---

> Try gpt-5.1-codex-mini
> Codex just got an upgrade. Introducing gpt-5.1-codex-mini.

We recommend switching from gpt-5-codex-mini to
gpt-5.1-codex-mini.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ source: tui/src/model_migration.rs
expression: terminal.backend()
---

> Try gpt-5.1
> Codex just got an upgrade. Introducing gpt-5.1.

We recommend switching from gpt-5 to gpt-5.1.

Expand Down
2 changes: 1 addition & 1 deletion codex-rs/tui2/src/app.rs
Original file line number Diff line number Diff line change
Expand Up @@ -214,7 +214,7 @@ async fn handle_model_migration_prompt_if_needed(
id: target_model,
reasoning_effort_mapping,
migration_config_key,
model_link: _,
..
}) = upgrade
{
if migration_prompt_hidden(config, migration_config_key.as_str()) {
Expand Down
Loading