8 changes: 4 additions & 4 deletions src/cli.rs
@@ -28,8 +28,8 @@ pub const fn default_stream() -> bool {
 #[command(author, version, about, long_about = None)]
 pub struct Cli {
     /// Optional path to the configuration file
-    #[arg(short, long, default_value = "axec.toml")]
-    config: String,
+    // #[arg(short, long, default_value = "axec.toml")]
+    // config: String,

     // Holds the parsed configuration
     #[clap(skip)]
@@ -47,11 +47,11 @@ impl Cli {
         }

         if !config_path.exists() {
-            fs::write(config_path, DEFAULT_CONFIG).unwrap()
+            fs::write(&config_path, DEFAULT_CONFIG).unwrap()
         }

         let config_content =
-            fs::read_to_string(&self.config).unwrap_or_else(|e| panic!("fail to read file: {e}"));
+            fs::read_to_string(&config_path).unwrap_or_else(|e| panic!("fail to read file: {e}"));

         self.config_data =
             toml::from_str(&config_content).unwrap_or_else(|e| panic!("config error: {e}"));
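Note on the `&config_path` changes above: `std::fs::write` takes its path argument by value as `impl AsRef<Path>`, so passing an owned `PathBuf` directly would move it and the later `fs::read_to_string(&config_path)` would not compile. A minimal standalone sketch of the resulting load flow, assuming `config_path` is an owned `PathBuf` and that `DEFAULT_CONFIG` stands in for the crate's bundled default config (names outside the diff are assumptions):

use std::fs;
use std::path::PathBuf;

// Assumption: stand-in for the crate's bundled default configuration.
const DEFAULT_CONFIG: &str = "# default axec.toml contents\n";

// Sketch of the post-change flow: borrow `config_path` so it stays usable afterwards.
fn load_config_contents(config_path: PathBuf) -> String {
    if !config_path.exists() {
        // `&config_path` avoids moving the PathBuf into fs::write.
        fs::write(&config_path, DEFAULT_CONFIG).unwrap();
    }
    fs::read_to_string(&config_path).unwrap_or_else(|e| panic!("fail to read file: {e}"))
}

fn main() {
    let contents = load_config_contents(PathBuf::from("axec.toml"));
    println!("{contents}");
}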
3 changes: 0 additions & 3 deletions src/llm/mod.rs
@@ -127,8 +127,6 @@ pub fn parse_response(
     response: anyhow::Result<blocking::Response>,
 ) -> anyhow::Result<String> {
     let json: serde_json::Value = response?.json()?;
-    // let text = response.unwrap().text().unwrap();
-    // println!("{text}");
     let llm_reply = match model_type {
         LLMModel::DeepSeekChat | LLMModel::DeepSeekReasoner => {
             json["choices"][0]["message"]["content"].as_str().unwrap()
@@ -141,5 +139,4 @@
         }
     };
     Ok(llm_reply.to_string())
-    // Ok("()".to_string())
 }
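For reference, a self-contained sketch of the JSON path the remaining DeepSeek match arm exercises; the response body below is an assumption modeled on the indexing shown in the diff, not taken from the crate:

use serde_json::json;

fn main() {
    // Assumed OpenAI/DeepSeek-style chat completion body.
    let body = json!({
        "choices": [
            { "message": { "content": "Hello from the model" } }
        ]
    });
    // Mirrors json["choices"][0]["message"]["content"].as_str().unwrap()
    let reply = body["choices"][0]["message"]["content"]
        .as_str()
        .expect("missing content field");
    assert_eq!(reply, "Hello from the model");
}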