Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
/target
22 changes: 22 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
[package]
name = "axec"
version = "0.1.0"
authors = ["AaravLu <demolemon@outlook.at>"]
edition = "2024"

[profile.dev]
debug = false
codegen-units = 16

[profile.release]
debug = false

[dependencies]
anyhow = "1.0"
clap = { version = "4.5", features = ["derive"] }
reqwest = { version = "0.12", features = ["json", "blocking"] }
rustyline = "16.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
# tokio = { version = "1.0", features = ["full"] }
toml = "0.8"
17 changes: 17 additions & 0 deletions axec.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# This file is used for tests.
[general]
selected = "deepseek"

[deepseek]
# name = "deepseek-reasoner"
name = "deepseek-chat"
# SECURITY: a real-looking API key is committed here. Revoke this key and load
# it from an environment variable instead of checking secrets into the repo.
api_key = "sk-2f2d2bf56d0247a2922f68cc67eea799"

[openai]
# name = "gpt-3.5-turbo"
name = "gpt-4o"
api_key = ""

[claude]
name = "claude-3.5"
api_key = ""
119 changes: 119 additions & 0 deletions src/cli.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,119 @@
use std::fs;

use clap::Parser;
use serde::Deserialize;
use toml;

// TOML配置文件对应的结构体
/// Root of the TOML configuration file (`axec.toml` by default).
/// Every section is optional so a partial config still deserializes.
#[derive(Debug, Deserialize)]
pub struct Config {
    general: Option<General>,
    deepseek: Option<Deepseek>,
    openai: Option<Openai>,
    claude: Option<Claude>,
}

/// `[general]` section: names which provider section is active.
#[derive(Debug, Deserialize)]
struct General {
    selected: String,
}

/// `[deepseek]` section: model name and API key.
#[derive(Debug, Deserialize)]
pub struct Deepseek {
    pub name: Option<String>,
    pub api_key: Option<String>,
}

/// `[openai]` section: model name and API key.
///
/// Fields are `pub` for consistency with `Deepseek`: `Cli::get_openai` hands
/// this struct to callers outside the module, which otherwise could not read
/// its fields.
#[derive(Debug, Deserialize)]
pub struct Openai {
    pub name: Option<String>,
    pub api_key: Option<String>,
}

/// `[claude]` section: model name and API key.
/// Fields are `pub` for the same reason as in `Openai`.
#[derive(Debug, Deserialize)]
pub struct Claude {
    pub name: Option<String>,
    pub api_key: Option<String>,
}

// Command-line interface: clap-parsed arguments plus the configuration
// loaded afterwards from the TOML file.
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
pub struct Cli {
    /// Optional path of the configuration file
    // NOTE: the doc comment above doubles as the clap `--help` text for `--config`.
    #[arg(short, long, default_value = "axec.toml")]
    config: String,

    // Parsed contents of the configuration file; populated by `load_config`,
    // never by clap (hence the skip attribute).
    #[clap(skip)]
    pub config_data: Option<Config>,
}

impl Cli {
    /// Loads and parses the TOML configuration file named by `--config`,
    /// storing the result in `config_data`.
    ///
    /// # Panics
    /// Panics — with the offending path in the message, so the user can tell
    /// WHICH file failed — when the file cannot be read or is not valid TOML.
    pub fn load_config(mut self) -> Self {
        let config_content = fs::read_to_string(&self.config)
            .unwrap_or_else(|e| panic!("fail to read file {}: {e}", self.config));

        self.config_data = toml::from_str(&config_content)
            .unwrap_or_else(|e| panic!("config error in {}: {e}", self.config));
        self
    }

    /// Returns `[general].selected` from the loaded config, if present.
    pub fn get_selected(&self) -> Option<&str> {
        self.config_data
            .as_ref()
            .and_then(|config| config.general.as_ref())
            .map(|general| general.selected.as_str())
    }

    /// Returns the `[deepseek]` section, if present.
    pub fn get_deepseek(&self) -> Option<&Deepseek> {
        self.config_data
            .as_ref()
            .and_then(|config| config.deepseek.as_ref())
    }

    /// Returns the `[openai]` section, if present.
    pub fn get_openai(&self) -> Option<&Openai> {
        self.config_data
            .as_ref()
            .and_then(|config| config.openai.as_ref())
    }

    /// Returns the `[claude]` section, if present.
    pub fn get_claude(&self) -> Option<&Claude> {
        self.config_data
            .as_ref()
            .and_then(|config| config.claude.as_ref())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    /// The config file must exist at the crate root for the app to boot.
    #[test]
    fn read_config_file() {
        fs::read_to_string("axec.toml").expect("Cannot find file");
    }

    /// Parses the checked-in config and verifies the fields the app relies on.
    #[test]
    fn read_and_parse_config_file() {
        let config_content = fs::read_to_string("axec.toml").expect("Cannot find file");

        let config_data: Config =
            toml::from_str(&config_content).unwrap_or_else(|e| panic!("config error: {e}"));

        let selected = config_data
            .general
            .as_ref()
            .map(|general| general.selected.as_str());
        assert_eq!(Some("deepseek"), selected);

        // Assert the key is merely present and non-empty rather than comparing
        // against a hard-coded secret: embedding a real API key in the test
        // both leaks it and breaks the test the moment the key is rotated.
        let api_key = config_data.deepseek.and_then(|deepseek| deepseek.api_key);
        assert!(api_key.as_deref().is_some_and(|key| !key.is_empty()));
    }
}
70 changes: 70 additions & 0 deletions src/lib.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
use std::process::Command;

use clap::Parser;
use llm::{IsLLMRequest, LLMModel, deepseek::DeepSeekRequest};
use rustyline::{Editor, error::ReadlineError};

use crate::llm::Message;

// System prompt that seeds every conversation.
const MESSAGE_BEGIN: &str = "You are a great helper for learning algorithms.";
// Interactive prompt string shown by the line editor.
pub const READLINE_PROMPT_BASE: &str = "Axec> ";
// Spinner animation frames.
// NOTE(review): `RUNNING` is not referenced anywhere in this file — confirm
// external callers before relying on or removing it.
pub const RUNNING: &str = "|/-\\";

pub mod cli;
pub mod llm;

pub fn run() {
let cli = cli::Cli::parse().load_config();
let mut messages = Vec::new();
messages.push(Message::from_system(MESSAGE_BEGIN));
let mut editor = Editor::<(), _>::new().expect("Failed to create editor");
println!("Welcome to Axec!");

let deepseek = cli.get_deepseek().expect("No deepseek configured");
let deepseek_apikey = deepseek
.api_key
.clone()
.expect("No deepseek apikey configured");

loop {
let readline = editor.readline(READLINE_PROMPT_BASE);
match readline {
Ok(user_input) => match try_execute(&user_input) {
Ok(output) => println!("{}", output),
Err(_err) => {
println!("Looks like input is not a valid command, so sent it to LLM\n");
let deepseek_req =
DeepSeekRequest::build(LLMModel::DeepSeekChat, messages.clone(), false);
messages.push(Message::from_user(&user_input));
let response = deepseek_req.send_request(&deepseek_apikey, &messages);
let json: serde_json::Value = response.unwrap().json().unwrap();
let llm_reply = json["choices"][0]["message"]["content"].as_str().unwrap();
println!("{}", llm_reply)
}
},
Err(ReadlineError::Eof) => {
println!("Exiting Axec");
break;
}
Err(_) => {
println!("Error reading input");
}
}
}
}

/// Attempts to run `line` as a shell command, returning captured stdout.
///
/// # Errors
/// Returns `Err` when the line is empty/whitespace, when the command cannot
/// be spawned (e.g. not found on PATH), or when it exits with a non-zero
/// status — in the last case the error carries the command's stderr. Treating
/// a non-zero exit as failure lets the caller fall back to the LLM path
/// instead of silently printing empty output.
fn try_execute(line: &str) -> Result<String, String> {
    let parts: Vec<&str> = line.split_whitespace().collect();
    let Some((cmd, args)) = parts.split_first() else {
        return Err("Empty command".to_string());
    };

    let output = Command::new(cmd)
        .args(args)
        .output()
        .map_err(|e| format!("Failed to execute command: {e}"))?;

    if !output.status.success() {
        return Err(String::from_utf8_lossy(&output.stderr).to_string());
    }

    Ok(String::from_utf8_lossy(&output.stdout).to_string())
}
1 change: 1 addition & 0 deletions src/llm/claude.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@

47 changes: 47 additions & 0 deletions src/llm/deepseek.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
use crate::llm::{IsLLMRequest, LLMModel, Message};

use reqwest::blocking;
use serde::Serialize;

/// HTTP endpoint for DeepSeek's chat-completions API.
pub const DEEPSEEK_ENDPOINT: &str = "https://api.deepseek.com/chat/completions";

/// JSON request body for a DeepSeek chat-completions call.
#[derive(Serialize, Debug, Clone)]
pub struct DeepSeekRequest {
    // Model identifier; serialized to the API's model-name string.
    model: LLMModel,
    // Full conversation history sent with the request.
    messages: Vec<Message>,
    // Whether the API should stream the reply.
    stream: bool,
}
impl DeepSeekRequest {
    /// Assembles a request payload from its parts.
    ///
    /// The `match` below is currently an identity mapping: every `LLMModel`
    /// variant is a DeepSeek model, so nothing is rejected. It is kept
    /// exhaustive (no `_` arm) so adding a non-DeepSeek variant becomes a
    /// compile error here, forcing this validation to be revisited.
    pub const fn build(model: LLMModel, messages: Vec<Message>, stream: bool) -> Self {
        let model = match model {
            LLMModel::DeepSeekChat => model,
            LLMModel::DeepSeekReasoner => model,
            // _ => panic!("It is not a DeepSeek model!"),
        };
        Self {
            model,
            messages,
            stream,
        }
    }
}
impl IsLLMRequest for DeepSeekRequest {
    /// POSTs a chat-completion request to `DEEPSEEK_ENDPOINT`.
    ///
    /// The request body reuses `self`'s configured `model` and `stream` flag
    /// (previously the body was rebuilt with `Default` values, silently
    /// discarding whatever the caller configured in `build`) while taking
    /// the possibly-newer `messages` slice supplied by the caller.
    ///
    /// # Errors
    /// Returns any transport error reported by `reqwest`.
    fn send_request(
        &self,
        api_key: &str,
        messages: &[Message],
    ) -> anyhow::Result<blocking::Response> {
        let deepseek_request = Self {
            model: self.model.clone(),
            messages: messages.to_vec(),
            stream: self.stream,
        };
        let client = blocking::Client::new();

        let response = client
            .post(DEEPSEEK_ENDPOINT)
            .header("Authorization", format!("Bearer {api_key}"))
            .header("Content-Type", "application/json")
            .json(&deepseek_request)
            .send()?;

        Ok(response)
    }
}
82 changes: 82 additions & 0 deletions src/llm/mod.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
use reqwest::blocking;
use serde::Serialize;

pub mod claude;
pub mod deepseek;
pub mod openai;

// Prompt-fragment prefixes for composing LLM context.
// NOTE(review): none of these constants are referenced in the code visible
// here — confirm external callers before relying on or removing them.
pub const CURRENT_ENV: &str = "CurrentEnvVars: ";
pub const SHELL_HISTORY: &str = "ShellHistory: ";
pub const NOTES: &str =
    "Notes: Please put the final command in last single line and wrap it with <<>>";

// pub static RESTORED_MESSAGES: LazyLock<RwLock<Vec<Message>>> =
//     LazyLock::new(|| RwLock::new(Vec::new()));

/// Common interface for sending a chat request to an LLM provider.
///
/// (The previous `#[allow(async_fn_in_trait)]` was stale — this trait has
/// no async fn — so it has been removed.)
pub trait IsLLMRequest {
    /// Sends `messages` to the provider, authenticating with `api_key`,
    /// and returns the raw blocking HTTP response.
    ///
    /// # Errors
    /// Returns an error when the HTTP request cannot be completed.
    fn send_request(
        &self,
        api_key: &str,
        messages: &[Message],
    ) -> anyhow::Result<blocking::Response>;
}

/// Supported model identifiers; each serializes to the API's model-name string.
#[derive(Debug, Clone, Default, Serialize)]
pub enum LLMModel {
    #[default]
    #[serde(rename = "deepseek-chat")]
    DeepSeekChat,
    #[serde(rename = "deepseek-reasoner")]
    DeepSeekReasoner,
}
// Parses an API model-name string into a variant.
//
// PANICS on any string other than "deepseek-chat" / "deepseek-reasoner":
// callers must validate input first. NOTE(review): a fallible `TryFrom` or
// `FromStr` would be the idiomatic non-panicking contract here.
impl From<&str> for LLMModel {
    fn from(model_name: &str) -> Self {
        match model_name {
            "deepseek-chat" => LLMModel::DeepSeekChat,
            "deepseek-reasoner" => LLMModel::DeepSeekReasoner,
            _ => panic!("No matched model!"),
        }
    }
}

/// One chat turn: who said it (`role`) and what was said (`content`).
#[derive(Clone, Debug, Serialize, Default)]
pub struct Message {
    role: Role,
    content: String,
}

impl Message {
    /// Builds a message with an explicit role.
    pub const fn new(role: Role, content: String) -> Self {
        Message { role, content }
    }

    /// User's message.
    pub fn from_user(once_serialized_input: &str) -> Self {
        Self::new(Role::User, once_serialized_input.to_string())
    }

    /// Assistant (model) reply.
    pub fn from_assistant(once_serialized_input: &str) -> Self {
        Self::new(Role::Assistant, once_serialized_input.to_string())
    }

    /// Initial system prompt.
    pub fn from_system(once_serialized_input: &str) -> Self {
        Self::new(Role::System, once_serialized_input.to_string())
    }
}

/// Author of a chat message; serialized lowercase ("assistant"/"system"/"user").
#[derive(Clone, Copy, Debug, Default, Serialize)]
#[serde(rename_all = "lowercase")]
pub enum Role {
    Assistant,
    System,
    #[default]
    User,
}
Empty file added src/llm/openai.rs
Empty file.
3 changes: 3 additions & 0 deletions src/main.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
// Thin binary entry point: all application logic lives in the `axec` library.
fn main() {
    axec::run()
}