Skip to content

Add support for API Key Per Request #66

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 4 additions & 2 deletions lib/ruby_llm.rb
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,8 @@ module RubyLLM
# Base class for all errors raised by RubyLLM; rescue this to catch
# any library-specific failure.
class Error < StandardError; end

class << self
def chat(model: nil, provider: nil)
Chat.new(model: model, provider: provider)
# Builds a new Chat session. +config+ defaults to the global
# configuration object; pass a per-request Configuration (e.g. a
# tweaked copy from Configuration#dup) to override API keys or other
# settings for this chat only.
def chat(model: nil, provider: nil, config: configuration)
  Chat.new(model: model, provider: provider, config: config)
end

def embed(...)
Expand All @@ -56,6 +56,8 @@ def config
@config ||= Configuration.new
end

alias configuration config

def logger
@logger ||= Logger.new(
$stdout,
Expand Down
13 changes: 10 additions & 3 deletions lib/ruby_llm/chat.rb
Original file line number Diff line number Diff line change
Expand Up @@ -11,14 +11,15 @@ module RubyLLM
class Chat
include Enumerable

attr_reader :model, :messages, :tools
attr_reader :model, :messages, :tools, :config

def initialize(model: nil, provider: nil)
def initialize(model: nil, provider: nil, config: nil)
model_id = model || RubyLLM.config.default_model
with_model(model_id, provider: provider)
@temperature = 0.7
@messages = []
@tools = {}
@config = config
@on = {
new_message: nil,
end_message: nil
Expand Down Expand Up @@ -58,6 +59,12 @@ def with_temperature(temperature)
self
end

# Applies per-chat configuration overrides without mutating the global
# RubyLLM.config. Yields a private copy of the effective configuration:
#
#   chat.with_config { |c| c.anthropic_api_key = 'other-key' }
#
# Returns self so calls can be chained.
def with_config(&block)
  # Fall back to the global config: @config is nil unless a config was
  # passed to #initialize, and nil.dup would hand the block nil.
  @config = (config || RubyLLM.config).dup
  yield @config if block
  self
end

def on_new_message(&block)
@on[:new_message] = block
self
Expand All @@ -74,7 +81,7 @@ def each(&)

def complete(&)
@on[:new_message]&.call
response = @provider.complete(messages, tools: @tools, temperature: @temperature, model: @model.id, &)
response = @provider.complete(messages, tools: @tools, temperature: @temperature, model: @model.id, config: @config, &)
@on[:end_message]&.call(response)

add_message response
Expand Down
9 changes: 9 additions & 0 deletions lib/ruby_llm/configuration.rb
Original file line number Diff line number Diff line change
Expand Up @@ -27,5 +27,14 @@ def initialize
@default_embedding_model = 'text-embedding-3-small'
@default_image_model = 'dall-e-3'
end

# Returns a fresh Configuration carrying over every instance variable
# (shallow — the values themselves are shared), so per-request tweaks
# never mutate the original object.
def dup
  instance_variables.each_with_object(self.class.new) do |ivar, copy|
    copy.instance_variable_set(ivar, instance_variable_get(ivar))
  end
end
end
end
43 changes: 22 additions & 21 deletions lib/ruby_llm/provider.rb
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ module Provider
module Methods
extend Streaming

def complete(messages, tools:, temperature:, model:, &block) # rubocop:disable Metrics/MethodLength
def complete(messages, tools:, temperature:, model:, config:, &block) # rubocop:disable Metrics/MethodLength
normalized_temperature = if capabilities.respond_to?(:normalize_temperature)
capabilities.normalize_temperature(temperature, model)
else
Expand All @@ -24,9 +24,9 @@ def complete(messages, tools:, temperature:, model:, &block) # rubocop:disable M
stream: block_given?)

if block_given?
stream_response payload, &block
stream_response(payload, config, &block)
else
sync_response payload
sync_response(payload, config)
end
end

Expand All @@ -51,49 +51,50 @@ def paint(prompt, model:, size:)
parse_image_response(response)
end

def configured?
missing_configs.empty?
# True when every configuration value this provider requires is present.
# Checks +config+ when given, otherwise the global RubyLLM.config.
def configured?(config = nil)
  effective_config = config || RubyLLM.config
  missing_configs(effective_config).empty?
end

private

def missing_configs
# Returns the subset of this provider's required configuration keys
# whose value on +config+ is missing (nil or empty).
def missing_configs(config)
  configuration_requirements.select do |key|
    value = config.send(key)
    # Guard with respond_to? so non-string values (e.g. numeric
    # settings) don't raise NoMethodError on #empty?.
    value.nil? || (value.respond_to?(:empty?) && value.empty?)
  end
end

def ensure_configured!
return if configured?
# Raises ConfigurationError with a copy-pasteable configuration snippet
# when any value this provider requires is missing from +config+.
def ensure_configured!(config)
  return if configured?(config)

  # One "c.<key> = ENV[...]" line per missing setting, embedded in a
  # ready-to-run RubyLLM.configure block for the error message.
  settings = missing_configs(config).map do |key|
    "c.#{key} = ENV['#{key.to_s.upcase}']"
  end
  config_block = <<~RUBY
    RubyLLM.configure do |c|
      #{settings.join("\n  ")}
    end
  RUBY

  raise ConfigurationError,
        "#{slug} provider is not configured. Add this to your initialization:\n\n#{config_block}"
end

def sync_response(payload)
response = post completion_url, payload
parse_completion_response response
# Issues a blocking (non-streaming) completion request using +config+
# and returns the parsed provider response.
def sync_response(payload, config)
  raw_response = post(completion_url, payload, config)
  parse_completion_response(raw_response)
end

def post(url, payload)
connection.post url, payload do |req|
req.headers.merge! headers
# POSTs +payload+ to +url+ on a connection built for +config+, merging
# in the provider-specific headers. Yields the request object when a
# block is given so callers can customize it further.
def post(url, payload, config)
  connection(config).post(url, payload) do |request|
    request.headers.merge!(headers(config))
    yield request if block_given?
  end
end

def connection # rubocop:disable Metrics/MethodLength,Metrics/AbcSize
ensure_configured!
def connection(config) # rubocop:disable Metrics/MethodLength,Metrics/AbcSize
ensure_configured!(config)

@connection ||= Faraday.new(api_base) do |f| # rubocop:disable Metrics/BlockLength
f.options.timeout = RubyLLM.config.request_timeout
f.options.timeout = config.request_timeout

f.response :logger,
RubyLLM.logger,
Expand All @@ -107,7 +108,7 @@ def connection # rubocop:disable Metrics/MethodLength,Metrics/AbcSize
end

f.request :retry, {
max: RubyLLM.config.max_retries,
max: config.max_retries,
interval: 0.05,
interval_randomness: 0.5,
backoff_factor: 2,
Expand Down
4 changes: 2 additions & 2 deletions lib/ruby_llm/providers/anthropic.rb
Original file line number Diff line number Diff line change
Expand Up @@ -19,9 +19,9 @@ def api_base
'https://api.anthropic.com'
end

def headers
# Request headers for the Anthropic Messages API. Reads the API key
# from the per-request +config+ rather than the global RubyLLM.config,
# so each request can use its own credentials.
def headers(config)
  {
    'x-api-key' => config.anthropic_api_key,
    'anthropic-version' => '2023-06-01'
  }
end
Expand Down