2 changes: 1 addition & 1 deletion codex-rs/app-server/tests/common/models_cache.rs
@@ -27,7 +27,7 @@ fn preset_to_info(preset: &ModelPreset, priority: i32) -> ModelInfo {
priority,
upgrade: preset.upgrade.as_ref().map(|u| u.into()),
base_instructions: "base instructions".to_string(),
- model_instructions_template: None,
+ model_messages: None,
supports_reasoning_summaries: false,
support_verbosity: false,
default_verbosity: None,
13 changes: 7 additions & 6 deletions codex-rs/app-server/tests/suite/v2/thread_resume.rs
@@ -31,7 +31,7 @@ use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
- const DEFAULT_BASE_INSTRUCTIONS: &str = "You are Codex, based on GPT-5. You are running as a coding agent in the Codex CLI on a user's computer.";
+ const CODEX_5_2_INSTRUCTIONS_TEMPLATE_DEFAULT: &str = "You are Codex, a coding agent based on GPT-5. You and the user share the same workspace and collaborate to achieve the user's goals.";

#[tokio::test]
async fn thread_resume_returns_original_thread() -> Result<()> {
@@ -368,7 +368,7 @@ async fn thread_resume_supports_history_and_overrides() -> Result<()> {
}

#[tokio::test]
- async fn thread_resume_accepts_personality_override_v2() -> Result<()> {
+ async fn thread_resume_accepts_personality_override() -> Result<()> {
skip_if_no_network!(Ok(()));

let server = responses::start_mock_server().await;
@@ -438,14 +438,14 @@ async fn thread_resume_accepts_personality_override_v2() -> Result<()> {
let request = response_mock.single_request();
let developer_texts = request.message_input_texts("developer");
assert!(
- !developer_texts
+ developer_texts
.iter()
.any(|text| text.contains("<personality_spec>")),
"did not expect a personality update message in developer input, got {developer_texts:?}"
"expected a personality update message in developer input, got {developer_texts:?}"
);
let instructions_text = request.instructions_text();
assert!(
- instructions_text.contains(DEFAULT_BASE_INSTRUCTIONS),
+ instructions_text.contains(CODEX_5_2_INSTRUCTIONS_TEMPLATE_DEFAULT),
"expected default base instructions from history, got {instructions_text:?}"
);

@@ -459,14 +459,15 @@ fn create_config_toml(codex_home: &std::path::Path, server_uri: &str) -> std::io
config_toml,
format!(
r#"
model = "mock-model"
model = "gpt-5.2-codex"
approval_policy = "never"
sandbox_mode = "read-only"

model_provider = "mock_provider"

[features]
remote_models = false
+ personality = true

[model_providers.mock_provider]
name = "Mock provider for test"
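Reviewer note: the flipped assertions above check for a `<personality_spec>` tag in the developer input. As a rough sketch of what the test matches against, assuming the wrapper is a plain XML-style envelope (the real `DeveloperInstructions::personality_spec_message` may format it differently):

```rust
// Hypothetical reconstruction of the wrapper the assertions look for; only
// the <personality_spec> tag itself is confirmed by the test strings.
fn personality_spec_message(personality_message: &str) -> String {
    format!("<personality_spec>\n{personality_message}\n</personality_spec>")
}

fn main() {
    let msg = personality_spec_message("You are a deeply pragmatic, effective software engineer.");
    // This is the substring the updated test asserts on.
    assert!(msg.contains("<personality_spec>"));
}
```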
9 changes: 5 additions & 4 deletions codex-rs/app-server/tests/suite/v2/turn_start.rs
@@ -63,7 +63,7 @@ async fn turn_start_sends_originator_header() -> Result<()> {
codex_home.path(),
&server.uri(),
"never",
&BTreeMap::default(),
&BTreeMap::from([(Feature::Personality, true)]),
)?;

let mut mcp = McpProcess::new(codex_home.path()).await?;
@@ -138,7 +138,7 @@ async fn turn_start_emits_user_message_item_with_text_elements() -> Result<()> {
codex_home.path(),
&server.uri(),
"never",
&BTreeMap::default(),
&BTreeMap::from([(Feature::Personality, true)]),
)?;

let mut mcp = McpProcess::new(codex_home.path()).await?;
@@ -230,7 +230,7 @@ async fn turn_start_emits_notifications_and_accepts_model_override() -> Result<(
codex_home.path(),
&server.uri(),
"never",
&BTreeMap::default(),
&BTreeMap::from([(Feature::Personality, true)]),
)?;

let mut mcp = McpProcess::new(codex_home.path()).await?;
@@ -425,7 +425,7 @@ async fn turn_start_accepts_personality_override_v2() -> Result<()> {
codex_home.path(),
&server.uri(),
"never",
&BTreeMap::default(),
&BTreeMap::from([(Feature::Personality, true)]),
)?;

let mut mcp = McpProcess::new(codex_home.path()).await?;
@@ -473,6 +473,7 @@ async fn turn_start_accepts_personality_override_v2() -> Result<()> {
if developer_texts.is_empty() {
eprintln!("request body: {}", request.body_json());
}
+
assert!(
developer_texts
.iter()
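Reviewer note: these tests now opt into the personality feature via `BTreeMap::from([(Feature::Personality, true)])` instead of an empty map. A minimal sketch of how such a map could be serialized into the `[features]` table seen in the thread_resume config above; the `Feature` enum and `features_toml` helper here are stand-ins, not the real codex-rs API:

```rust
use std::collections::BTreeMap;

// Stand-in for the real Feature enum; only the variants this PR touches.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
enum Feature {
    Personality,
    RemoteModels,
}

// Render a feature map as a `[features]` TOML table.
fn features_toml(features: &BTreeMap<Feature, bool>) -> String {
    let mut out = String::from("[features]\n");
    for (feature, enabled) in features {
        let key = match feature {
            Feature::Personality => "personality",
            Feature::RemoteModels => "remote_models",
        };
        out.push_str(&format!("{key} = {enabled}\n"));
    }
    out
}

fn main() {
    let features = BTreeMap::from([(Feature::Personality, true)]);
    assert_eq!(features_toml(&features), "[features]\npersonality = true\n");
}
```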
2 changes: 1 addition & 1 deletion codex-rs/codex-api/tests/models_integration.rs
@@ -77,7 +77,7 @@ async fn models_client_hits_models_endpoint() {
priority: 1,
upgrade: None,
base_instructions: "base instructions".to_string(),
- model_instructions_template: None,
+ model_messages: None,
supports_reasoning_summaries: false,
support_verbosity: false,
default_verbosity: None,
52 changes: 37 additions & 15 deletions codex-rs/core/src/codex.rs
@@ -1265,27 +1265,31 @@ impl Session {
previous: Option<&Arc<TurnContext>>,
next: &TurnContext,
) -> Option<ResponseItem> {
- let personality = next.personality?;
- if let Some(prev) = previous
- && prev.personality == Some(personality)
- {
+ if !self.features.enabled(Feature::Personality) {
return None;
}
- let model_info = next.client.get_model_info();
- let personality_message = Self::personality_message_for(&model_info, personality);
+ let previous = previous?;

- personality_message.map(|personality_message| {
- DeveloperInstructions::personality_spec_message(personality_message).into()
- })
+ // if a personality is specified and it's different from the previous one, build a personality update item
+ if let Some(personality) = next.personality
+ && next.personality != previous.personality
+ {
+ let model_info = next.client.get_model_info();
+ let personality_message = Self::personality_message_for(&model_info, personality);
+ personality_message.map(|personality_message| {
+ DeveloperInstructions::personality_spec_message(personality_message).into()
+ })
+ } else {
+ None
+ }
}

fn personality_message_for(model_info: &ModelInfo, personality: Personality) -> Option<String> {
model_info
- .model_instructions_template
+ .model_messages
.as_ref()
- .and_then(|template| template.personality_messages.as_ref())
- .and_then(|messages| messages.0.get(&personality))
- .cloned()
+ .and_then(|spec| spec.get_personality_message(Some(personality)))
+ .filter(|message| !message.is_empty())
}

fn build_collaboration_mode_update_item(
@@ -1825,15 +1829,33 @@ impl Session {
items.push(DeveloperInstructions::new(developer_instructions.to_string()).into());
}
// Add developer instructions from collaboration_mode if they exist and are non-empty
- let collaboration_mode = {
+ let (collaboration_mode, base_instructions) = {
let state = self.state.lock().await;
- state.session_configuration.collaboration_mode.clone()
+ (
+ state.session_configuration.collaboration_mode.clone(),
+ state.session_configuration.base_instructions.clone(),
+ )
};
if let Some(collab_instructions) =
DeveloperInstructions::from_collaboration_mode(&collaboration_mode)
{
items.push(collab_instructions.into());
}
+ if self.features.enabled(Feature::Personality)
+ && let Some(personality) = turn_context.personality
+ {
+ let model_info = turn_context.client.get_model_info();
+ let has_baked_personality = model_info.supports_personality()
+ && base_instructions == model_info.get_model_instructions(Some(personality));
+ if !has_baked_personality
+ && let Some(personality_message) =
+ Self::personality_message_for(&model_info, personality)
+ {
+ items.push(
+ DeveloperInstructions::personality_spec_message(personality_message).into(),
+ );
+ }
+ }
if let Some(user_instructions) = turn_context.user_instructions.as_deref() {
items.push(
UserInstructions {
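Reviewer note: read together, these hunks imply the protocol-side shape that replaces `ModelInstructionsTemplate`/`PersonalityMessages`. A minimal sketch of what `ModelMessages` and `get_personality_message` might look like; the struct and field names are taken from the diff, while the method body is an assumption:

```rust
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum Personality {
    Friendly,
    Pragmatic,
}

#[derive(Clone, Debug, Default)]
pub struct ModelInstructionsVariables {
    pub personality_default: Option<String>,
    pub personality_friendly: Option<String>,
    pub personality_pragmatic: Option<String>,
}

#[derive(Clone, Debug, Default)]
pub struct ModelMessages {
    pub instructions_template: Option<String>,
    pub instructions_variables: Option<ModelInstructionsVariables>,
}

impl ModelMessages {
    // Resolve the message for the requested personality, falling back to
    // the default variable when no personality is requested (assumed).
    pub fn get_personality_message(&self, personality: Option<Personality>) -> Option<String> {
        let vars = self.instructions_variables.as_ref()?;
        match personality {
            Some(Personality::Friendly) => vars.personality_friendly.clone(),
            Some(Personality::Pragmatic) => vars.personality_pragmatic.clone(),
            None => vars.personality_default.clone(),
        }
    }
}
```

Under that reading, the `.filter(|message| !message.is_empty())` guard in `personality_message_for` ensures an empty resolved message (e.g. the empty-string `personality_default` below) yields `None` rather than an empty developer item.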
55 changes: 28 additions & 27 deletions codex-rs/core/src/models_manager/model_info.rs
@@ -1,19 +1,17 @@
- use std::collections::BTreeMap;
-
- use codex_protocol::config_types::Personality;
use codex_protocol::config_types::Verbosity;
use codex_protocol::openai_models::ApplyPatchToolType;
use codex_protocol::openai_models::ConfigShellToolType;
use codex_protocol::openai_models::ModelInfo;
- use codex_protocol::openai_models::ModelInstructionsTemplate;
+ use codex_protocol::openai_models::ModelInstructionsVariables;
+ use codex_protocol::openai_models::ModelMessages;
use codex_protocol::openai_models::ModelVisibility;
- use codex_protocol::openai_models::PersonalityMessages;
use codex_protocol::openai_models::ReasoningEffort;
use codex_protocol::openai_models::ReasoningEffortPreset;
use codex_protocol::openai_models::TruncationMode;
use codex_protocol::openai_models::TruncationPolicyConfig;

use crate::config::Config;
+ use crate::features::Feature;
use crate::truncate::approx_bytes_for_tokens;
use tracing::warn;

@@ -29,8 +27,11 @@ const GPT_5_1_CODEX_MAX_INSTRUCTIONS: &str = include_str!("../../gpt-5.1-codex-m
const GPT_5_2_CODEX_INSTRUCTIONS: &str = include_str!("../../gpt-5.2-codex_prompt.md");
const GPT_5_2_CODEX_INSTRUCTIONS_TEMPLATE: &str =
include_str!("../../templates/model_instructions/gpt-5.2-codex_instructions_template.md");
- const PERSONALITY_FRIENDLY: &str = include_str!("../../templates/personalities/friendly.md");
- const PERSONALITY_PRAGMATIC: &str = include_str!("../../templates/personalities/pragmatic.md");
+
+ const GPT_5_2_CODEX_PERSONALITY_FRIENDLY: &str =
+ include_str!("../../templates/personalities/gpt-5.2-codex_friendly.md");
+ const GPT_5_2_CODEX_PERSONALITY_PRAGMATIC: &str =
+ include_str!("../../templates/personalities/gpt-5.2-codex_pragmatic.md");

pub(crate) const CONTEXT_WINDOW_272K: i64 = 272_000;

@@ -54,7 +55,7 @@ macro_rules! model_info {
priority: 99,
upgrade: None,
base_instructions: BASE_INSTRUCTIONS.to_string(),
- model_instructions_template: None,
+ model_messages: None,
supports_reasoning_summaries: false,
support_verbosity: false,
default_verbosity: None,
@@ -100,8 +101,11 @@ pub(crate) fn with_config_overrides(mut model: ModelInfo, config: &Config) -> Mo

if let Some(base_instructions) = &config.base_instructions {
model.base_instructions = base_instructions.clone();
- model.model_instructions_template = None;
+ model.model_messages = None;
+ } else if !config.features.enabled(Feature::Personality) {
+ model.model_messages = None;
}

model
}

@@ -169,15 +173,13 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
model_info!(
slug,
base_instructions: GPT_5_2_CODEX_INSTRUCTIONS.to_string(),
- model_instructions_template: Some(ModelInstructionsTemplate {
- template: GPT_5_2_CODEX_INSTRUCTIONS_TEMPLATE.to_string(),
- personality_messages: Some(PersonalityMessages(BTreeMap::from([(
- Personality::Friendly,
- PERSONALITY_FRIENDLY.to_string(),
- ), (
- Personality::Pragmatic,
- PERSONALITY_PRAGMATIC.to_string(),
- )]))),
+ model_messages: Some(ModelMessages {
+ instructions_template: Some(GPT_5_2_CODEX_INSTRUCTIONS_TEMPLATE.to_string()),
+ instructions_variables: Some(ModelInstructionsVariables {
+ personality_default: Some("".to_string()),
+ personality_friendly: Some(GPT_5_2_CODEX_PERSONALITY_FRIENDLY.to_string()),
+ personality_pragmatic: Some(GPT_5_2_CODEX_PERSONALITY_PRAGMATIC.to_string()),
+ }),
+ }),
apply_patch_tool_type: Some(ApplyPatchToolType::Freeform),
shell_type: ConfigShellToolType::ShellCommand,
Expand Down Expand Up @@ -213,15 +215,14 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
truncation_policy: TruncationPolicyConfig::tokens(10_000),
context_window: Some(CONTEXT_WINDOW_272K),
supported_reasoning_levels: supported_reasoning_level_low_medium_high_xhigh(),
- model_instructions_template: Some(ModelInstructionsTemplate {
- template: GPT_5_2_CODEX_INSTRUCTIONS_TEMPLATE.to_string(),
- personality_messages: Some(PersonalityMessages(BTreeMap::from([(
- Personality::Friendly,
- PERSONALITY_FRIENDLY.to_string(),
- ), (
- Personality::Pragmatic,
- PERSONALITY_PRAGMATIC.to_string(),
- )]))),
+ base_instructions: GPT_5_2_CODEX_INSTRUCTIONS.to_string(),
+ model_messages: Some(ModelMessages {
+ instructions_template: Some(GPT_5_2_CODEX_INSTRUCTIONS_TEMPLATE.to_string()),
+ instructions_variables: Some(ModelInstructionsVariables {
+ personality_default: Some("".to_string()),
+ personality_friendly: Some(GPT_5_2_CODEX_PERSONALITY_FRIENDLY.to_string()),
+ personality_pragmatic: Some(GPT_5_2_CODEX_PERSONALITY_PRAGMATIC.to_string()),
+ }),
+ }),
)
} else if slug.starts_with("gpt-5.1-codex-max") {
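Reviewer note: the `with_config_overrides` hunk above encodes a two-step gate: a user-supplied `base_instructions` override always clears the templated messages, and even without an override they are dropped when the `personality` feature is off. A condensed sketch of that precedence, with a pared-down stand-in for `ModelInfo`:

```rust
// Pared-down stand-in; the real ModelInfo carries many more fields.
struct MiniModelInfo {
    base_instructions: String,
    model_messages: Option<String>, // stands in for Option<ModelMessages>
}

fn with_config_overrides(
    mut model: MiniModelInfo,
    custom_base_instructions: Option<&str>,
    personality_enabled: bool,
) -> MiniModelInfo {
    if let Some(base_instructions) = custom_base_instructions {
        // An explicit override wins outright and disables templating.
        model.base_instructions = base_instructions.to_string();
        model.model_messages = None;
    } else if !personality_enabled {
        // Feature off: fall back to the static base instructions.
        model.model_messages = None;
    }
    model
}

fn main() {
    let model = MiniModelInfo {
        base_instructions: "default".to_string(),
        model_messages: Some("templated".to_string()),
    };
    let gated = with_config_overrides(model, None, false);
    assert!(gated.model_messages.is_none());
    assert_eq!(gated.base_instructions, "default");
}
```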
codex-rs/core/templates/model_instructions/gpt-5.2-codex_instructions_template.md
@@ -1,8 +1,6 @@
You are Codex, a coding agent based on GPT-5. You and the user share the same workspace and collaborate to achieve the user's goals.

- # Personality
-
- {{ personality_message }}
+ {{ personality }}

## Tone and style
- Anything you say outside of tool use is shown to the user. Do not narrate abstractly; explain what you are doing and why, using plain language.
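Reviewer note: with the heading moved out of the template, the `{{ personality }}` slot takes whatever the selected personality file provides, heading included (see the pragmatic file below). Assuming plain string substitution, rendering might look like this; `render` is a hypothetical helper, not a function from this diff:

```rust
// Simplified template echoing the structure above.
const TEMPLATE: &str =
    "You are Codex, a coding agent based on GPT-5.\n\n{{ personality }}\n\n## Tone and style";

// Hypothetical rendering step: splice the chosen personality text into the
// slot. An empty message (the default variable) collapses to a blank line.
fn render(template: &str, personality_message: &str) -> String {
    template.replace("{{ personality }}", personality_message)
}

fn main() {
    let pragmatic = "# Personality\n\nYou are a deeply pragmatic, effective software engineer.";
    let rendered = render(TEMPLATE, pragmatic);
    assert!(rendered.contains("# Personality"));
}
```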
codex-rs/core/templates/personalities/gpt-5.2-codex_pragmatic.md
@@ -1,4 +1,5 @@
+ # Personality

You are a deeply pragmatic, effective software engineer. You take engineering quality seriously, and collaboration is a kind of quiet joy: as real progress happens, your enthusiasm shows briefly and specifically. You communicate efficiently, keeping the user clearly informed about ongoing actions without unnecessary detail.

## Values
2 changes: 1 addition & 1 deletion codex-rs/core/tests/suite/models_cache_ttl.rs
@@ -175,7 +175,7 @@ fn test_remote_model(slug: &str, priority: i32) -> ModelInfo {
priority,
upgrade: None,
base_instructions: "base instructions".to_string(),
- model_instructions_template: None,
+ model_messages: None,
supports_reasoning_summaries: false,
support_verbosity: false,
default_verbosity: None,