Skip to content

Commit

Permalink
Merge pull request #12 from raphaelmansuy/feat/claude
Browse files Browse the repository at this point in the history
v0.1.9 / Better documentation and tests
  • Loading branch information
raphaelmansuy authored Apr 7, 2024
2 parents f300383 + 55c30e6 commit 7afb35f
Show file tree
Hide file tree
Showing 18 changed files with 421 additions and 47 deletions.
3 changes: 2 additions & 1 deletion .vscode/settings.json
Original file line number Diff line number Diff line change
Expand Up @@ -4,5 +4,6 @@
"./Cargo.toml",
"./Cargo.toml"
],
"rust-analyzer.showUnlinkedFileNotification": false
"rust-analyzer.showUnlinkedFileNotification": false,
"rust-analyzer.checkOnSave": false
}
2 changes: 1 addition & 1 deletion Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "hiramu"
version = "0.1.8"
version = "0.1.9"
edition = "2021"
license = "MIT"
description = "A Rust AI Engineering Toolbox to Access Ollama, AWS Bedrock"
Expand Down
124 changes: 96 additions & 28 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -28,28 +28,29 @@ hiramu = "0.1.8"
### Generating Text with Mistral

```rust
use hiramu::bedrock::model_info::{ModelInfo, ModelName};
use hiramu::bedrock::models::mistral::mistral_client::{MistralClient, MistralOptions};
use hiramu::bedrock::models::mistral::mistral_request_message::MistralRequestBuilder;
use hiramu::bedrock::model_info::{ModelInfo, ModelName};

#[tokio::main]
async fn main() {
async fn generating_text_with_mistral() {
let mistral_options = MistralOptions::new()
.profile_name("bedrock")
.region("us-west-2");

let client = MistralClient::new(mistral_options).await;

let request = MistralRequestBuilder::new("<s>[INST] What is the capital of France?[/INST]".to_string())
.max_tokens(200)
.temperature(0.8)
.build();
let request =
MistralRequestBuilder::new("<s>[INST] What is the capital of France?[/INST]".to_string())
.max_tokens(200)
.temperature(0.8)
.build();

let model_id = ModelInfo::from_model_name(ModelName::MistralMixtral8X7BInstruct0x);
let response = client.generate(model_id, &request).await.unwrap();

println!("Response: {:?}", response.outputs.text);
println!("Response: {:?}", response.outputs[0].text);
}

```

### Streaming Text Generation with Mistral
Expand All @@ -60,8 +61,7 @@ use hiramu::bedrock::models::mistral::mistral_client::{MistralClient, MistralOpt
use hiramu::bedrock::models::mistral::mistral_request_message::MistralRequestBuilder;
use hiramu::bedrock::model_info::{ModelInfo, ModelName};

#[tokio::main]
async fn main() {
pub async fn generating_text_with_mistral() {
let mistral_options = MistralOptions::new()
.profile_name("bedrock")
.region("us-west-2");
Expand All @@ -79,7 +79,7 @@ async fn main() {
while let Some(result) = stream.next().await {
match result {
Ok(response) => {
println!("Response: {:?}", response.outputs.text);
println!("Response: {:?}", response.outputs[0].text);
}
Err(err) => {
eprintln!("Error: {:?}", err);
Expand All @@ -92,12 +92,14 @@ async fn main() {
### Generating Text with Ollama

```rust
use std::io::Write;

use futures::TryStreamExt;

use hiramu::ollama::ollama_client::OllamaClient;
use hiramu::ollama::model::{GenerateRequest, GenerateRequestBuilder};
use futures::stream::TryStreamExt;
use hiramu::ollama::model::{GenerateRequestBuilder};

#[tokio::main]
async fn main() {
async fn generating_text_with_ollama() {
let client = OllamaClient::new("http://localhost:11434".to_string());
let request = GenerateRequestBuilder::new("mistral".to_string())
.prompt("Once upon a time".to_string())
Expand All @@ -107,7 +109,8 @@ async fn main() {

response_stream
.try_for_each(|chunk| async move {
println!("{}", chunk.response);
print!("{}", chunk.response);
std::io::stdout().flush()?;
Ok(())
})
.await
Expand All @@ -118,12 +121,17 @@ async fn main() {
### Chatting with Claude using Bedrock

```rust
use hiramu::bedrock::models::claude::claude_client::{ClaudeClient, ClaudeOptions};
use hiramu::bedrock::models::claude::claude_request_message::{ChatOptions, ConversationRequest, Message};
use std::io::Write;

use futures::TryStreamExt;

use hiramu::bedrock::model_info::{ModelInfo, ModelName};
use hiramu::bedrock::models::claude::claude_client::{ClaudeClient, ClaudeOptions};
use hiramu::bedrock::models::claude::claude_request_message::{
ChatOptions, ContentBlockDelta, ConversationRequest, Message, StreamResultData,
};

#[tokio::main]
async fn main() {
pub async fn chat_with_claude() {
let claude_options = ClaudeOptions::new()
.profile_name("bedrock")
.region("us-west-2");
Expand All @@ -138,7 +146,9 @@ async fn main() {
let chat_options = ChatOptions::default()
.with_temperature(0.7)
.with_max_tokens(100)
.with_model_id(ModelInfo::from_model_name(ModelName::AnthropicClaudeHaiku1x));
.with_model_id(ModelInfo::from_model_name(
ModelName::AnthropicClaudeHaiku1x,
));

let response_stream = client
.chat_with_stream(&conversation_request, &chat_options)
Expand All @@ -147,23 +157,38 @@ async fn main() {

response_stream
.try_for_each(|chunk| async move {
println!("{:?}", chunk);
match chunk {
StreamResultData::ContentBlockStart(..) => {
println!("\n------------------------------");
}
StreamResultData::ContentBlockStop(..) => {
println!("\n------------------------------");
}
StreamResultData::ContentBlockDelta(ContentBlockDelta { delta, .. }) => {
print!("{}", delta.text);
std::io::stdout().flush().unwrap();
}
_ => {}
}
Ok(())
})
.await
.unwrap();
}
```

### Sending Images with Claude
### Working with Images with Claude

```rust
use std::io::Write;

use futures::TryStreamExt;

use hiramu::bedrock::models::claude::claude_client::{ClaudeClient, ClaudeOptions};
use hiramu::bedrock::models::claude::claude_request_message::{ChatOptions, ConversationRequest, Message};
use hiramu::bedrock::models::claude::claude_request_message::{ChatOptions, ContentBlockDelta, ConversationRequest, Message, StreamResultData};
use hiramu::fetch_and_base64_encode_image;

#[tokio::main]
async fn main() {
async fn image_with_claude() {
let claude_options = ClaudeOptions::new()
.profile_name("bedrock")
.region("us-west-2");
Expand All @@ -189,14 +214,28 @@ async fn main() {
.await
.unwrap();

response_stream
response_stream
.try_for_each(|chunk| async move {
println!("{:?}", chunk);
match chunk {
StreamResultData::ContentBlockStart(..) => {
println!("\n------------------------------");
}
StreamResultData::ContentBlockStop(..) => {
println!("\n------------------------------");
}

StreamResultData::ContentBlockDelta(ContentBlockDelta { delta, .. }) => {
print!("{}", delta.text);
std::io::stdout().flush().unwrap();
}
_ => {}
}
Ok(())
})
.await
.unwrap();
}

```

### Using the Raw Bedrock API
Expand Down Expand Up @@ -290,6 +329,35 @@ async fn main() {
}
```

## Using Embeddings with Ollama

```rust
use hiramu::ollama::{EmbeddingsRequestBuilder, OllamaClient};

pub async fn demo_ollama_embedding() -> Result<(), Box<dyn std::error::Error>> {
let client = OllamaClient::new("http://localhost:11434".to_string());

let prompt = "The quick brown fox jumps over the lazy dog.";

let request = EmbeddingsRequestBuilder::new("nomic-embed-text".to_string(), prompt.to_string())
.keep_alive("10m".to_string())
.build();

match client.embeddings(request).await {
Ok(response) => {
// Print embeddings dimensions
println!("Embeddings dimensions: {:?}", response.embedding.len());
println!("Embeddings: {:?}", response);
}
Err(error) => {
eprintln!("Error: {:?}", error);
}
}

Ok(())
}
```

## Examples

Here is a table with a description for each example:
Expand Down
2 changes: 1 addition & 1 deletion src/bedrock/models/claude/claude_request_message.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
use serde::{Deserialize, Serialize};

use super::ClaudeError;

pub struct ChatOptions {
pub model_id: String,
Expand Down Expand Up @@ -339,6 +338,7 @@ pub struct StreamContentBlockDelta {
}

#[derive(Debug, Serialize, Deserialize)]

pub struct StreamDelta {
text: String,
#[serde(rename = "type")]
Expand Down
2 changes: 1 addition & 1 deletion src/bedrock/models/mistral/mistral_client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,7 @@ mod tests {


let mut stream = client
.generate_with_stream("mistral.mistral-7b-instruct-v0:2".to_string(), &request)
.generate_with_stream(model_name.to_owned(), &request)
.await
.unwrap();

Expand Down
1 change: 1 addition & 0 deletions src/examples/demo_claude_chat.rs
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ pub async fn demo_chat_claude() {

match response {
Ok(response) => {
println!("{:?}", response);
let json_display = serde_json::to_string_pretty(&response).unwrap();
println!("{:?}", json_display);
}
Expand Down
46 changes: 35 additions & 11 deletions src/examples/demo_claude_chat_stream.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,12 @@ use futures::TryStreamExt;

use crate::bedrock::model_info::{ModelInfo, ModelName};
use crate::bedrock::models::claude::claude_client::{ClaudeClient, ClaudeOptions};
use crate::bedrock::models::claude::claude_request_message::ChatOptions;
use crate::bedrock::models::claude::claude_request_message::ConversationRequest;
use crate::bedrock::models::claude::claude_request_message::Message;
use crate::bedrock::models::claude::claude_request_message::{ChatOptions, StreamResultData};

pub async fn demo_chat_claude_with_stream() {

let claude_options
= ClaudeOptions::new()
let claude_options = ClaudeOptions::new()
.profile_name("bedrock")
.region("us-west-2");

Expand All @@ -29,14 +27,13 @@ pub async fn demo_chat_claude_with_stream() {
);

let chat_options = ChatOptions::default()
.with_model_id(ModelInfo::from_model_name(ModelName::AnthropicClaudeHaiku1x))
.with_model_id(ModelInfo::from_model_name(
ModelName::AnthropicClaudeHaiku1x,
))
.with_temperature(0.5);

let response_stream = client
.chat_with_stream(
&conversation_request,
&chat_options
)
.chat_with_stream(&conversation_request, &chat_options)
.await;

let response_stream = match response_stream {
Expand All @@ -50,13 +47,40 @@ pub async fn demo_chat_claude_with_stream() {
    // consume the stream and print the response
response_stream
.try_for_each(|chunk| async move {
let json_display = serde_json::to_string_pretty(&chunk).unwrap();
println!("{:?}", json_display);
display_streamresult_data(chunk);
Ok(())
})
.await
.unwrap();
}

/// Pretty-prints a single streaming event received from the Claude
/// Bedrock response stream.
///
/// Each `StreamResultData` variant is routed to a labelled `println!`
/// so the demo output shows the lifecycle of a streamed response
/// (message start/delta/stop and content-block start/delta/stop).
///
/// Note: the original version repeated the `ContentBlockStart` and
/// `ContentBlockStop` arms; the second occurrences were unreachable
/// (rustc `unreachable_patterns`) and have been removed.
fn display_streamresult_data(data: StreamResultData) {
    match data {
        StreamResultData::ContentBlockStart(content_block_start) => {
            println!("ContentBlockStart: {:?}", content_block_start);
        }
        StreamResultData::ContentBlockStop(content_block_end) => {
            println!("ContentBlockEnd: {:?}", content_block_end);
        }
        StreamResultData::MessageStart(message_start) => {
            println!("MessageStart: {:?}", message_start);
        }
        StreamResultData::MessageStop(message_end) => {
            println!("MessageStop: {:?}", message_end);
        }
        StreamResultData::MessageDelta(message_delta) => {
            println!("MessageDelta: {:?}", message_delta);
        }
        StreamResultData::ContentBlockDelta(content_block_delta) => {
            println!("ContentBlockDelta: {:?}", content_block_delta);
        }
    }
}

// Test
Expand Down
2 changes: 1 addition & 1 deletion src/examples/demo_claude_multimedia.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ use futures::TryStreamExt;
use crate::{bedrock::models::claude::{claude_client::{ClaudeClient, ClaudeOptions}, claude_request_message::{ChatOptions, ConversationRequest, Message}}, fetch_and_base64_encode_image};


async fn demo_claude_multimedia() {
pub async fn demo_claude_multimedia() {
let claude_options = ClaudeOptions::new().profile_name("bedrock").region("us-west-2");

let client = ClaudeClient::new(claude_options).await;
Expand Down
1 change: 0 additions & 1 deletion src/examples/demo_ollama.rs
Original file line number Diff line number Diff line change
Expand Up @@ -118,7 +118,6 @@ pub async fn print_generate_response(

#[cfg(test)]
mod tests {
use super::*;

#[tokio::test]
async fn test_chat_response_loop() {
Expand Down
2 changes: 1 addition & 1 deletion src/examples/demo_ollama_embedding.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use crate::ollama::{EmbeddingsRequestBuilder, OllamaClient};

async fn demo_ollama_embedding() -> Result<(), Box<dyn std::error::Error>> {
pub async fn demo_ollama_embedding() -> Result<(), Box<dyn std::error::Error>> {
let client = OllamaClient::new("http://localhost:11434".to_string());

let prompt = "The quick brown fox jumps over the lazy dog.";
Expand Down
2 changes: 2 additions & 0 deletions src/examples/mod.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
pub mod simple_examples;

pub mod demo_ollama;
pub mod demo_bedrock_raw_generate;
pub mod demo_bedrock_raw_stream;
Expand Down
Loading

0 comments on commit 7afb35f

Please sign in to comment.