
Commit 0af613a

SystemPrompt
jonathanhecl authored and xyproto committed Aug 3, 2024
1 parent 024739a commit 0af613a
Showing 3 changed files with 28 additions and 17 deletions.
42 changes: 25 additions & 17 deletions v2/ollamaclient.go
@@ -33,6 +33,7 @@ type RequestOptions struct {
 // GenerateRequest represents the request payload for generating output
 type GenerateRequest struct {
 	Model  string   `json:"model"`
+	System string   `json:"system,omitempty"`
 	Prompt string   `json:"prompt,omitempty"`
 	Images []string `json:"images,omitempty"` // base64 encoded images
 	Stream bool     `json:"stream,omitempty"`
@@ -85,6 +86,7 @@ type Config struct {
 	TrimSpace     bool
 	Verbose       bool
 	ContextLength int64
+	SystemPrompt  string
 	Tools         []json.RawMessage
 }

@@ -161,6 +163,11 @@ func (oc *Config) SetReproducible(optionalSeed ...int) {
 	oc.SeedOrNegative = defaultFixedSeed
 }
 
+// SetSystemPrompt sets the system prompt for this Ollama config
+func (oc *Config) SetSystemPrompt(prompt string) {
+	oc.SystemPrompt = prompt
+}
+
 // SetRandom configures the generated output to not be reproducible
 func (oc *Config) SetRandom() {
 	oc.SeedOrNegative = -1
@@ -193,33 +200,34 @@ func (oc *Config) GetOutputChat(promptAndOptionalImages ...string) (OutputChat,
 	if seed < 0 {
 		temperature = oc.TemperatureIfNegativeSeed
 	}
+	messages := []Message{}
+	if oc.SystemPrompt != "" {
+		messages = append(messages, Message{
+			Role:    "system",
+			Content: oc.SystemPrompt,
+		})
+	}
+	messages = append(messages, Message{
+		Role:    "user",
+		Content: prompt,
+	})
 	var reqBody GenerateChatRequest
 	if len(images) > 0 {
 		reqBody = GenerateChatRequest{
-			Model: oc.ModelName,
-			Messages: []Message{
-				{
-					Role:    "user",
-					Content: prompt,
-				},
-			},
-			Images: images,
-			Tools:  oc.Tools,
+			Model:    oc.ModelName,
+			Messages: messages,
+			Images:   images,
+			Tools:    oc.Tools,
 			Options: RequestOptions{
 				Seed:        seed,        // set to -1 to make it random
 				Temperature: temperature, // set to 0 together with a specific seed to make output reproducible
 			},
 		}
 	} else {
 		reqBody = GenerateChatRequest{
-			Model: oc.ModelName,
-			Messages: []Message{
-				{
-					Role:    "user",
-					Content: prompt,
-				},
-			},
-			Tools: oc.Tools,
+			Model:    oc.ModelName,
+			Messages: messages,
+			Tools:    oc.Tools,
 			Options: RequestOptions{
 				Seed:        seed,        // set to -1 to make it random
 				Temperature: temperature, // set to 0 together with a specific seed to make output reproducible
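With the change above, GetOutputChat builds the messages slice up front and prepends a system message whenever one has been set. A minimal usage sketch follows; it assumes a New() constructor returning *Config and an (OutputChat, error) return from GetOutputChat, neither of which is fully shown in this diff, while SetSystemPrompt, ModelName, and GetOutputChat itself come from the changed code.

package main

import (
	"fmt"

	"github.com/xyproto/ollamaclient/v2"
)

func main() {
	oc := ollamaclient.New() // constructor assumed; not part of this diff
	oc.ModelName = "llama3.1"
	oc.SetSystemPrompt("You are a helpful assistant.")
	// With a non-empty SystemPrompt, the chat request now carries two
	// messages: {role: "system"} followed by {role: "user"}.
	output, err := oc.GetOutputChat("Why is the sky blue?")
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Printf("%+v\n", output) // print the OutputChat fields generically
}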
2 changes: 2 additions & 0 deletions v2/stream.go
@@ -69,6 +69,7 @@ func (oc *Config) StreamOutput(callbackFunction func(string, bool), promptAndOpt
 	if len(images) > 0 {
 		reqBody = GenerateRequest{
 			Model:  oc.ModelName,
+			System: oc.SystemPrompt,
 			Prompt: prompt,
 			Images: images,
 			Stream: true,
@@ -80,6 +81,7 @@
 	} else {
 		reqBody = GenerateRequest{
 			Model:  oc.ModelName,
+			System: oc.SystemPrompt,
 			Prompt: prompt,
 			Stream: true,
 			Options: RequestOptions{
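The streaming path gets the same treatment through the new System field on GenerateRequest. A sketch under the same assumptions as above (the New() constructor is not part of this diff); the callback type func(string, bool) is copied from the StreamOutput signature in the hunk header, with the bool presumed to mark the final chunk.

package main

import (
	"fmt"

	"github.com/xyproto/ollamaclient/v2"
)

func main() {
	oc := ollamaclient.New() // constructor assumed; not part of this diff
	oc.ModelName = "llama3.1"
	oc.SetSystemPrompt("Answer in one short sentence.")
	// Each callback invocation receives the next chunk of generated text;
	// the bool presumably signals that this is the final chunk.
	oc.StreamOutput(func(chunk string, done bool) {
		fmt.Print(chunk)
		if done {
			fmt.Println()
		}
	}, "Why is the sky blue?")
}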
1 change: 1 addition & 0 deletions v2/tools_test.go
@@ -18,6 +18,7 @@ func TestTools(t *testing.T) {
 		t.Error("Expected to have 'llama3.1' model downloaded, but it's not present")
 	}
 
+	oc.SetSystemPrompt("You are a helpful assistant.")
 	oc.SetRandom()
 	oc.SetTool(json.RawMessage(`{
 		"type": "function",
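Because the new field is tagged json:"system,omitempty", the system prompt only appears on the wire when it is non-empty. A self-contained sketch of the resulting request payload; GenerateRequest is redeclared here, trimmed to four fields, purely so the snippet runs on its own.

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed copy of GenerateRequest from v2/ollamaclient.go, for illustration only.
type GenerateRequest struct {
	Model  string `json:"model"`
	System string `json:"system,omitempty"`
	Prompt string `json:"prompt,omitempty"`
	Stream bool   `json:"stream,omitempty"`
}

func main() {
	b, _ := json.Marshal(GenerateRequest{
		Model:  "llama3.1",
		System: "You are a helpful assistant.",
		Prompt: "Why is the sky blue?",
	})
	fmt.Println(string(b))
	// {"model":"llama3.1","system":"You are a helpful assistant.","prompt":"Why is the sky blue?"}
}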
