Added support for local models
Jonathan Dunn authored and committed on Mar 5, 2024
1 parent 37efb69 commit 0ce5ed2
Showing 2 changed files with 58 additions and 31 deletions.
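
In short: the commit threads a single local boolean from a new CLI flag into Standalone, which then routes each chat request either to the existing OpenAI client or to a local Ollama server, defaulting local runs to llama2.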
installer/client/cli/fabric.py (8 changes: 7 additions, 1 deletion)
@@ -43,6 +43,8 @@ def main():
     parser.add_argument(
         "--setup", help="Set up your fabric instance", action="store_true"
     )
+    parser.add_argument(
+        '--local', '-L', help="Use local LLM. Default is llama2", action="store_true")
     parser.add_argument(
         "--model", "-m", help="Select the model to use (GPT-4 by default)", default="gpt-4-turbo-preview"
     )
@@ -90,7 +92,11 @@ def main():
     if not os.path.exists(os.path.join(config, "context.md")):
         print("Please create a context.md file in ~/.config/fabric")
         sys.exit()
-    standalone = Standalone(args, args.pattern)
+    standalone = None
+    if args.local:
+        standalone = Standalone(args, args.pattern, local=True)
+    else:
+        standalone = Standalone(args, args.pattern)
     if args.list:
         try:
             direct = sorted(os.listdir(config_patterns_directory))
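
With the flag wired in, a run would look something like fabric --local --pattern <pattern-name> (the pattern name here is illustrative). Since --local is store_true, it is a plain boolean switch; despite the help text, it takes no model name of its own. The llama2 default is applied later inside Standalone, as the second file below shows.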
installer/client/cli/utils.py (81 changes: 51 additions, 30 deletions)
@@ -1,6 +1,7 @@
 import requests
 import os
 from openai import OpenAI
+import asyncio
 import pyperclip
 import sys
 import platform
@@ -15,7 +16,7 @@


 class Standalone:
-    def __init__(self, args, pattern="", env_file="~/.config/fabric/.env"):
+    def __init__(self, args, pattern="", env_file="~/.config/fabric/.env", local=False):
         """ Initialize the class with the provided arguments and environment file.
 
         Args:
@@ -44,10 +45,24 @@ def __init__(self, args, pattern="", env_file="~/.config/fabric/.env"):
         except FileNotFoundError:
             print("No API key found. Use the --apikey option to set the key")
             sys.exit()
+        self.local = local
         self.config_pattern_directory = config_directory
         self.pattern = pattern
         self.args = args
         self.model = args.model
+        if self.local:
+            if self.args.model == 'gpt-4-turbo-preview':
+                self.args.model = 'llama2'
+
+    async def localChat(self, messages):
+        from ollama import AsyncClient
+        response = await AsyncClient().chat(model=self.args.model, messages=messages)
+        print(response['message']['content'])
+
+    async def localStream(self, messages):
+        from ollama import AsyncClient
+        async for part in await AsyncClient().chat(model=self.args.model, messages=messages, stream=True):
+            print(part['message']['content'], end='', flush=True)
 
     def streamMessage(self, input_data: str, context=""):
         """ Stream a message and handle exceptions.
@@ -87,26 +102,29 @@ def streamMessage(self, input_data: str, context=""):
         else:
             messages = [user_message]
         try:
-            stream = self.client.chat.completions.create(
-                model=self.model,
-                messages=messages,
-                temperature=0.0,
-                top_p=1,
-                frequency_penalty=0.1,
-                presence_penalty=0.1,
-                stream=True,
-            )
-            for chunk in stream:
-                if chunk.choices[0].delta.content is not None:
-                    char = chunk.choices[0].delta.content
-                    buffer += char
-                    if char not in ["\n", " "]:
-                        print(char, end="")
-                    elif char == " ":
-                        print(" ", end="")  # Explicitly handle spaces
-                    elif char == "\n":
-                        print()  # Handle newlines
-                sys.stdout.flush()
+            if self.local:
+                asyncio.run(self.localStream(messages))
+            else:
+                stream = self.client.chat.completions.create(
+                    model=self.model,
+                    messages=messages,
+                    temperature=0.0,
+                    top_p=1,
+                    frequency_penalty=0.1,
+                    presence_penalty=0.1,
+                    stream=True,
+                )
+                for chunk in stream:
+                    if chunk.choices[0].delta.content is not None:
+                        char = chunk.choices[0].delta.content
+                        buffer += char
+                        if char not in ["\n", " "]:
+                            print(char, end="")
+                        elif char == " ":
+                            print(" ", end="")  # Explicitly handle spaces
+                        elif char == "\n":
+                            print()  # Handle newlines
+                    sys.stdout.flush()
         except Exception as e:
             print(f"Error: {e}")
             print(e)
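
Both local branches (the one above and its counterpart in sendMessage below) delegate to the coroutines defined earlier. A minimal standalone sketch of the same ollama calls, offered as an illustration rather than part of the commit: the demo function and prompt text are invented, and it assumes an Ollama server is running locally with the llama2 model already pulled (ollama pull llama2).

import asyncio
from ollama import AsyncClient

async def demo():
    messages = [{"role": "user", "content": "Name one benefit of local models."}]
    # One-shot chat, mirroring localChat: await the full response, then print it.
    response = await AsyncClient().chat(model="llama2", messages=messages)
    print(response["message"]["content"])
    # Streaming chat, mirroring localStream: print each chunk as it arrives.
    async for part in await AsyncClient().chat(model="llama2", messages=messages, stream=True):
        print(part["message"]["content"], end="", flush=True)
    print()

asyncio.run(demo())

Note that the local paths forward only the message list: the temperature, top_p, and penalty settings used in the OpenAI calls are not passed along, so local runs fall back to whatever defaults the Ollama server applies.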
@@ -153,15 +171,18 @@ def sendMessage(self, input_data: str, context=""):
         else:
             messages = [user_message]
         try:
-            response = self.client.chat.completions.create(
-                model=self.model,
-                messages=messages,
-                temperature=0.0,
-                top_p=1,
-                frequency_penalty=0.1,
-                presence_penalty=0.1,
-            )
-            print(response.choices[0].message.content)
+            if self.local:
+                asyncio.run(self.localChat(messages))
+            else:
+                response = self.client.chat.completions.create(
+                    model=self.model,
+                    messages=messages,
+                    temperature=0.0,
+                    top_p=1,
+                    frequency_penalty=0.1,
+                    presence_penalty=0.1,
+                )
+                print(response.choices[0].message.content)
         except Exception as e:
             print(f"Error: {e}")
             print(e)
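
A design note on the bridge used in both methods: streamMessage and sendMessage stay synchronous, and each local call drives its coroutine with asyncio.run(), creating and tearing down a fresh event loop per request. That is a reasonable fit for a one-shot CLI, but it would raise a RuntimeError if either method were ever invoked from inside an already-running event loop.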