Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
34 changes: 20 additions & 14 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,16 +17,15 @@ If you have docker installed, you can use the following command to run the tool,
installation:

```bash
# Display help
docker run ghcr.io/mutablelogic/go-llm:latest --help
# Display version, help
docker run ghcr.io/mutablelogic/go-llm version
docker run ghcr.io/mutablelogic/go-llm --help

# Interact with Claude to retrieve news headlines, assuming
# you have an API key for Anthropic and NewsAPI
docker run \
-e OLLAMA_URL -e MISTRAL_API_KEY -e ANTHROPIC_API_KEY -e OPENAI_API_KEY \
-e NEWSAPI_KEY \
ghcr.io/mutablelogic/go-llm:latest \
chat mistral-small-latest --prompt "What is the latest news?" --no-stream
# you have an API key for both Anthropic and NewsAPI
docker run -e ANTHROPIC_API_KEY -e NEWSAPI_KEY \
ghcr.io/mutablelogic/go-llm \
chat mistral-small-latest --prompt "What is the latest news?"
```

See below for more information on how to use the command-line tool (or how to
Expand Down Expand Up @@ -559,17 +558,24 @@ LLM agent command line interface
Flags:
-h, --help Show context-sensitive help.
--debug Enable debug output
--verbose Enable verbose output
-v, --verbose Enable verbose output
--timeout=DURATION Agent connection timeout
--ollama-endpoint=STRING Ollama endpoint ($OLLAMA_URL)
--anthropic-key=STRING Anthropic API Key ($ANTHROPIC_API_KEY)
--mistral-key=STRING Mistral API Key ($MISTRAL_API_KEY)
--open-ai-key=STRING OpenAI API Key ($OPENAI_API_KEY)
--gemini-key=STRING Gemini API Key ($GEMINI_API_KEY)
--news-key=STRING News API Key ($NEWSAPI_KEY)

Commands:
agents Return a list of agents
models Return a list of models
tools Return a list of tools
download Download a model
chat Start a chat session
agents Return a list of agents
models Return a list of models
tools Return a list of tools
download Download a model
chat Start a chat session
complete Complete a prompt
embedding Generate an embedding
version Print the version of this tool

Run "llm <command> --help" for more information on a command.
```
Expand Down
22 changes: 22 additions & 0 deletions cmd/examples/agents/main.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
package main

import (
"fmt"
"os"

"github.com/mutablelogic/go-llm/pkg/agent"
)

// main constructs a single agent that aggregates every LLM provider
// whose credentials are present in the environment, then prints the
// names of the providers that were successfully registered.
func main() {
	// Each With* option registers one provider; a missing environment
	// variable simply leaves that provider unconfigured.
	llm, err := agent.New(
		agent.WithAnthropic(os.Getenv("ANTHROPIC_API_KEY")),
		agent.WithMistral(os.Getenv("MISTRAL_API_KEY")),
		agent.WithOpenAI(os.Getenv("OPENAI_API_KEY")),
		agent.WithOllama(os.Getenv("OLLAMA_URL")),
	)
	if err != nil {
		panic(err)
	}

	// Name() reports the aggregated provider names.
	fmt.Println("Running agents are: ", llm.Name())
}
41 changes: 41 additions & 0 deletions cmd/examples/completion/main.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
package main

import (
"context"
"fmt"
"os"

"github.com/mutablelogic/go-llm/pkg/agent"
)

// main is an example CLI that completes a prompt against a named model.
//
// Usage: completion <model> <prompt>
func main() {
	// Aggregate every provider configured via environment variables
	// behind one agent interface.
	llm, err := agent.New(
		agent.WithAnthropic(os.Getenv("ANTHROPIC_API_KEY")),
		agent.WithMistral(os.Getenv("MISTRAL_API_KEY")),
		agent.WithOpenAI(os.Getenv("OPENAI_API_KEY")),
		agent.WithOllama(os.Getenv("OLLAMA_URL")),
	)
	if err != nil {
		panic(err)
	}

	// Exactly two positional arguments are required: model and prompt.
	if len(os.Args) != 3 {
		fmt.Println("Usage: completion <model> <prompt>")
		os.Exit(-1)
	}

	// Resolve the model by name across all registered providers.
	ctx := context.TODO()
	model, err := llm.GetModel(ctx, os.Args[1])
	if err != nil {
		panic(err)
	}

	// Run the completion for the supplied prompt.
	completion, err := model.Completion(ctx, os.Args[2])
	if err != nil {
		panic(err)
	}

	fmt.Println("Completion is: ", completion)
}
41 changes: 41 additions & 0 deletions cmd/examples/embedding/main.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
package main

import (
"context"
"fmt"
"os"

"github.com/mutablelogic/go-llm/pkg/agent"
)

// main is an example CLI that generates an embedding vector for a
// prompt using a named model.
//
// Usage: embedding <model> <prompt>
func main() {
	// Aggregate every provider configured via environment variables
	// behind one agent interface.
	llm, err := agent.New(
		agent.WithAnthropic(os.Getenv("ANTHROPIC_API_KEY")),
		agent.WithMistral(os.Getenv("MISTRAL_API_KEY")),
		agent.WithOpenAI(os.Getenv("OPENAI_API_KEY")),
		agent.WithOllama(os.Getenv("OLLAMA_URL")),
	)
	if err != nil {
		panic(err)
	}

	// Exactly two positional arguments are required: model and prompt.
	if len(os.Args) != 3 {
		fmt.Println("Usage: embedding <model> <prompt>")
		os.Exit(-1)
	}

	// Resolve the model by name across all registered providers.
	ctx := context.TODO()
	model, err := llm.GetModel(ctx, os.Args[1])
	if err != nil {
		panic(err)
	}

	// Compute the embedding for the supplied prompt.
	vector, err := model.Embedding(ctx, os.Args[2])
	if err != nil {
		panic(err)
	}

	fmt.Println("Vector is: ", vector)
}
26 changes: 26 additions & 0 deletions cmd/examples/models/main.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
package main

import (
"context"
"fmt"
"os"

"github.com/mutablelogic/go-llm/pkg/agent"
)

// main lists the models available across all LLM providers whose
// credentials are present in the environment.
func main() {
	// Create a new agent which aggregates multiple providers; a missing
	// environment variable simply leaves that provider unconfigured.
	agent, err := agent.New(
		agent.WithAnthropic(os.Getenv("ANTHROPIC_API_KEY")),
		agent.WithMistral(os.Getenv("MISTRAL_API_KEY")),
		agent.WithOpenAI(os.Getenv("OPENAI_API_KEY")),
		agent.WithOllama(os.Getenv("OLLAMA_URL")),
	)
	if err != nil {
		panic(err)
	}

	// Return models. BUG FIX: the error was previously assigned but never
	// checked; handle it the same way as the other errors in this example.
	models, err := agent.ListModels(context.TODO())
	if err != nil {
		panic(err)
	}
	// Also fixes the "Availalable" typo in the output message.
	fmt.Println("Available models are: ", models)
}