Skip to content

Commit e722f85

Browse files
Copilot and oleander
authored
[Refactor] Standardize type names: App → AppConfig, remove Settings alias (#72)
Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> Co-authored-by: oleander <220827+oleander@users.noreply.github.com> Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
1 parent 7d7896d commit e722f85

File tree

10 files changed

+130
-26
lines changed

10 files changed

+130
-26
lines changed

src/bin/hook.rs

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -94,13 +94,15 @@ impl Args {
9494
Some(Message | Template | Merge | Squash) => Ok(()),
9595
Some(Commit) | None => {
9696
let repo = Repository::open_from_env().context("Failed to open repository")?;
97-
let model = config::APP
97+
let model = config::APP_CONFIG
9898
.model
9999
.clone()
100100
.unwrap_or("gpt-4o-mini".to_string())
101101
.into();
102102
let used_tokens = commit::token_used(&model)?;
103-
let max_tokens = config::APP.max_tokens.unwrap_or(model.context_size());
103+
let max_tokens = config::APP_CONFIG
104+
.max_tokens
105+
.unwrap_or(model.context_size());
104106
let remaining_tokens = max_tokens.saturating_sub(used_tokens).max(512); // Ensure minimum 512 tokens
105107

106108
let tree = match self.sha1.as_deref() {

src/commit.rs

Lines changed: 10 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ use async_openai::Client;
55

66
use crate::{config, debug_output, openai, profile};
77
use crate::model::Model;
8-
use crate::config::App as Settings;
8+
use crate::config::AppConfig;
99
use crate::multi_step_integration::{generate_commit_message_local, generate_commit_message_multi_step};
1010

1111
/// The instruction template included at compile time
@@ -21,7 +21,10 @@ const INSTRUCTION_TEMPLATE: &str = include_str!("../resources/prompt.md");
2121
#[doc(hidden)]
2222
pub fn get_instruction_template() -> Result<String> {
2323
profile!("Generate instruction template");
24-
let max_length = config::APP.max_commit_length.unwrap_or(72).to_string();
24+
let max_length = config::APP_CONFIG
25+
.max_commit_length
26+
.unwrap_or(72)
27+
.to_string();
2528
let template = mustache::compile_str(INSTRUCTION_TEMPLATE)
2629
.map_err(|e| anyhow!("Template compilation error: {}", e))?
2730
.render_to_string(&hashmap! {
@@ -70,7 +73,7 @@ pub fn create_commit_request(diff: String, max_tokens: usize, model: Model) -> R
7073
/// Returns an error if:
7174
/// - max_tokens is 0
7275
/// - OpenAI API call fails
73-
pub async fn generate(patch: String, remaining_tokens: usize, model: Model, settings: Option<&Settings>) -> Result<openai::Response> {
76+
pub async fn generate(patch: String, remaining_tokens: usize, model: Model, settings: Option<&AppConfig>) -> Result<openai::Response> {
7477
profile!("Generate commit message");
7578

7679
if remaining_tokens == 0 {
@@ -80,7 +83,7 @@ pub async fn generate(patch: String, remaining_tokens: usize, model: Model, sett
8083
// Try multi-step approach first
8184
let max_length = settings
8285
.and_then(|s| s.max_commit_length)
83-
.or(config::APP.max_commit_length);
86+
.or(config::APP_CONFIG.max_commit_length);
8487

8588
// Check if we have a valid API key configuration
8689
let has_valid_api_key = if let Some(custom_settings) = settings {
@@ -91,7 +94,7 @@ pub async fn generate(patch: String, remaining_tokens: usize, model: Model, sett
9194
.unwrap_or(false)
9295
} else {
9396
// Check environment variable or config
94-
config::APP
97+
config::APP_CONFIG
9598
.openai_api_key
9699
.as_ref()
97100
.map(|key| !key.is_empty() && key != "<PLACE HOLDER FOR YOUR API KEY>")
@@ -215,7 +218,7 @@ mod tests {
215218
#[tokio::test]
216219
async fn test_missing_api_key_error() {
217220
// Create settings with no API key
218-
let settings = Settings {
221+
let settings = AppConfig {
219222
openai_api_key: None,
220223
model: Some("gpt-4o-mini".to_string()),
221224
max_tokens: Some(1024),
@@ -253,7 +256,7 @@ mod tests {
253256
#[tokio::test]
254257
async fn test_invalid_api_key_error() {
255258
// Create settings with invalid API key
256-
let settings = Settings {
259+
let settings = AppConfig {
257260
openai_api_key: Some("<PLACE HOLDER FOR YOUR API KEY>".to_string()),
258261
model: Some("gpt-4o-mini".to_string()),
259262
max_tokens: Some(1024),

src/config.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ const DEFAULT_MODEL: &str = "gpt-4o-mini";
1616
const DEFAULT_API_KEY: &str = "<PLACE HOLDER FOR YOUR API KEY>";
1717

1818
#[derive(Debug, Default, Deserialize, PartialEq, Eq, Serialize)]
19-
pub struct App {
19+
pub struct AppConfig {
2020
pub openai_api_key: Option<String>,
2121
pub model: Option<String>,
2222
pub max_tokens: Option<usize>,
@@ -32,7 +32,7 @@ pub struct ConfigPaths {
3232

3333
lazy_static! {
3434
static ref PATHS: ConfigPaths = ConfigPaths::new();
35-
pub static ref APP: App = App::new().expect("Failed to load config");
35+
pub static ref APP_CONFIG: AppConfig = AppConfig::new().expect("Failed to load config");
3636
}
3737

3838
impl ConfigPaths {
@@ -55,7 +55,7 @@ impl ConfigPaths {
5555
}
5656
}
5757

58-
impl App {
58+
impl AppConfig {
5959
pub fn new() -> Result<Self> {
6060
dotenv::dotenv().ok();
6161
PATHS.ensure_exists()?;

src/generation/mod.rs

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
pub mod types;
2+
3+
pub use types::{CommitResponse, FileCategory, FileChange, OperationType};

src/generation/types.rs

Lines changed: 94 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,94 @@
1+
use std::collections::HashMap;
2+
3+
use serde::{Deserialize, Serialize};
4+
5+
#[derive(Debug, Clone, Serialize, Deserialize)]
6+
pub struct FileChange {
7+
pub file_path: String,
8+
pub operation_type: OperationType,
9+
pub diff_content: Option<String>,
10+
pub lines_added: u32,
11+
pub lines_removed: u32,
12+
pub file_category: FileCategory,
13+
pub summary: String,
14+
pub impact_score: f32
15+
}
16+
17+
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
18+
pub enum OperationType {
19+
Added,
20+
Modified,
21+
Deleted,
22+
Renamed,
23+
Binary
24+
}
25+
26+
impl OperationType {
27+
pub fn as_str(&self) -> &'static str {
28+
match self {
29+
OperationType::Added => "added",
30+
OperationType::Modified => "modified",
31+
OperationType::Deleted => "deleted",
32+
OperationType::Renamed => "renamed",
33+
OperationType::Binary => "binary"
34+
}
35+
}
36+
}
37+
38+
impl From<&str> for OperationType {
39+
fn from(s: &str) -> Self {
40+
match s {
41+
"added" => OperationType::Added,
42+
"modified" => OperationType::Modified,
43+
"deleted" => OperationType::Deleted,
44+
"renamed" => OperationType::Renamed,
45+
"binary" => OperationType::Binary,
46+
_ => OperationType::Modified // default fallback
47+
}
48+
}
49+
}
50+
51+
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
52+
pub enum FileCategory {
53+
Source,
54+
Test,
55+
Config,
56+
Docs,
57+
Binary,
58+
Build
59+
}
60+
61+
impl FileCategory {
62+
pub fn as_str(&self) -> &'static str {
63+
match self {
64+
FileCategory::Source => "source",
65+
FileCategory::Test => "test",
66+
FileCategory::Config => "config",
67+
FileCategory::Docs => "docs",
68+
FileCategory::Binary => "binary",
69+
FileCategory::Build => "build"
70+
}
71+
}
72+
}
73+
74+
impl From<&str> for FileCategory {
75+
fn from(s: &str) -> Self {
76+
match s {
77+
"source" => FileCategory::Source,
78+
"test" => FileCategory::Test,
79+
"config" => FileCategory::Config,
80+
"docs" => FileCategory::Docs,
81+
"binary" => FileCategory::Binary,
82+
"build" => FileCategory::Build,
83+
_ => FileCategory::Source // default fallback
84+
}
85+
}
86+
}
87+
88+
/// Unified response type for commit message generation
89+
#[derive(Debug, Clone, Serialize, Deserialize)]
90+
pub struct CommitResponse {
91+
pub message: String,
92+
pub reasoning: String,
93+
pub files: HashMap<String, FileChange>
94+
}

src/lib.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@ pub mod multi_step_analysis;
1111
pub mod multi_step_integration;
1212
pub mod simple_multi_step;
1313
pub mod debug_output;
14+
pub mod generation;
1415

1516
// Re-exports
1617
pub use profiling::Profile;

src/main.rs

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ use structopt::StructOpt;
55
use anyhow::Result;
66
use dotenv::dotenv;
77

8-
use crate::config::App;
8+
use crate::config::AppConfig;
99
use crate::filesystem::Filesystem;
1010

1111
#[derive(StructOpt)]
@@ -119,28 +119,28 @@ fn run_config_reset() -> Result<()> {
119119
}
120120

121121
fn run_config_model(value: String) -> Result<()> {
122-
let mut app = App::new()?;
122+
let mut app = AppConfig::new()?;
123123
app.update_model(value.clone())?;
124124
println!("✅ Model set to: {value}");
125125
Ok(())
126126
}
127127

128128
fn run_config_max_tokens(max_tokens: usize) -> Result<()> {
129-
let mut app = App::new()?;
129+
let mut app = AppConfig::new()?;
130130
app.update_max_tokens(max_tokens)?;
131131
println!("✅ Max tokens set to: {max_tokens}");
132132
Ok(())
133133
}
134134

135135
fn run_config_max_commit_length(max_commit_length: usize) -> Result<()> {
136-
let mut app = App::new()?;
136+
let mut app = AppConfig::new()?;
137137
app.update_max_commit_length(max_commit_length)?;
138138
println!("✅ Max commit length set to: {max_commit_length}");
139139
Ok(())
140140
}
141141

142142
fn run_config_openai_api_key(value: String) -> Result<()> {
143-
let mut app = App::new()?;
143+
let mut app = AppConfig::new()?;
144144
app.update_openai_api_key(value)?;
145145
println!("✅ OpenAI API key updated");
146146
Ok(())

src/model.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ use colored::Colorize;
1212

1313
use crate::profile;
1414
// use crate::config::format_prompt; // Temporarily comment out
15-
use crate::config::App as Settings; // Use App as Settings
15+
use crate::config::AppConfig;
1616

1717
// Cached tokenizer for performance
1818
static TOKENIZER: OnceLock<CoreBPE> = OnceLock::new();
@@ -217,7 +217,7 @@ fn get_tokenizer(_model_str: &str) -> CoreBPE {
217217
tiktoken_rs::cl100k_base().expect("Failed to create tokenizer")
218218
}
219219

220-
pub async fn run(settings: Settings, content: String) -> Result<String> {
220+
pub async fn run(settings: AppConfig, content: String) -> Result<String> {
221221
let model_str = settings.model.as_deref().unwrap_or(DEFAULT_MODEL_NAME);
222222

223223
let client = async_openai::Client::new();

src/multi_step_analysis.rs

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@ use serde::{Deserialize, Serialize};
22
use serde_json::json;
33
use async_openai::types::{ChatCompletionTool, ChatCompletionToolType, FunctionObjectArgs};
44
use anyhow::Result;
5+
// TODO: Migrate to unified types from generation module
56

67
/// File analysis result from the analyze function
78
#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -23,7 +24,7 @@ pub struct FileDataForScoring {
2324
pub summary: String
2425
}
2526

26-
/// File data with calculated impact score
27+
/// File data with calculated impact score
2728
#[derive(Debug, Clone, Serialize, Deserialize)]
2829
pub struct FileWithScore {
2930
pub file_path: String,

src/openai.rs

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ use futures::future::join_all;
1010

1111
use crate::{commit, config, debug_output, function_calling, profile};
1212
use crate::model::Model;
13-
use crate::config::App as Settings;
13+
use crate::config::AppConfig;
1414
use crate::multi_step_integration::generate_commit_message_multi_step;
1515

1616
const MAX_ATTEMPTS: usize = 3;
@@ -110,7 +110,7 @@ pub async fn generate_commit_message(diff: &str) -> Result<String> {
110110
}
111111

112112
/// Creates an OpenAI configuration from application settings
113-
pub fn create_openai_config(settings: &Settings) -> Result<OpenAIConfig> {
113+
pub fn create_openai_config(settings: &AppConfig) -> Result<OpenAIConfig> {
114114
let api_key = settings
115115
.openai_api_key
116116
.as_ref()
@@ -205,7 +205,7 @@ pub async fn call_with_config(request: Request, config: OpenAIConfig) -> Result<
205205
let client = Client::with_config(config.clone());
206206
let model = request.model.to_string();
207207

208-
match generate_commit_message_multi_step(&client, &model, &request.prompt, config::APP.max_commit_length).await {
208+
match generate_commit_message_multi_step(&client, &model, &request.prompt, config::APP_CONFIG.max_commit_length).await {
209209
Ok(message) => return Ok(Response { response: message }),
210210
Err(e) => {
211211
// Check if it's an API key error and propagate it
@@ -218,7 +218,7 @@ pub async fn call_with_config(request: Request, config: OpenAIConfig) -> Result<
218218

219219
// Original single-step implementation as fallback
220220
// Create client with timeout if specified
221-
let client = if let Some(timeout) = config::APP.timeout {
221+
let client = if let Some(timeout) = config::APP_CONFIG.timeout {
222222
let http_client = reqwest::ClientBuilder::new()
223223
.timeout(Duration::from_secs(timeout as u64))
224224
.build()?;
@@ -236,7 +236,7 @@ pub async fn call_with_config(request: Request, config: OpenAIConfig) -> Result<
236236
let truncated_prompt = truncate_to_fit(&request.prompt, available_tokens, &request.model)?;
237237

238238
// Create the commit function tool
239-
let commit_tool = function_calling::create_commit_function_tool(config::APP.max_commit_length)?;
239+
let commit_tool = function_calling::create_commit_function_tool(config::APP_CONFIG.max_commit_length)?;
240240

241241
let chat_request = CreateChatCompletionRequestArgs::default()
242242
.max_tokens(request.max_tokens)
@@ -382,7 +382,7 @@ pub async fn call(request: Request) -> Result<Response> {
382382
profile!("OpenAI API call");
383383

384384
// Create OpenAI configuration using our settings
385-
let config = create_openai_config(&config::APP)?;
385+
let config = create_openai_config(&config::APP_CONFIG)?;
386386

387387
// Use the call_with_config function with the default config
388388
call_with_config(request, config).await

0 commit comments

Comments
 (0)