
refactor: change to gpt_tools.py for clarity
sean1832 committed Feb 26, 2023
1 parent 2902643 commit 9c535a1
Showing 3 changed files with 7 additions and 7 deletions.
2 changes: 1 addition & 1 deletion GPT/__init__.py
@@ -1,3 +1,3 @@
 from GPT import query
-from GPT import toolkit
+from GPT import gpt_tools
 from GPT import model
GPT/toolkit.py → GPT/gpt_tools.py
File renamed without changes.
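
A quick way to confirm the rename propagated through the package (a minimal sketch, not part of the commit; it assumes this repository's GPT package is importable):

# sanity_check.py -- illustrative only; assumes the GPT package from this repo is on the path
import GPT

assert hasattr(GPT, 'gpt_tools'), "renamed module should be re-exported by GPT/__init__.py"
assert not hasattr(GPT, 'toolkit'), "old module name should no longer be imported"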
12 changes: 6 additions & 6 deletions GPT/query.py
@@ -24,7 +24,7 @@ def build(chunk_size=4000):
     chunk_count = len(chunks)
     result = []
     for idx, chunk in enumerate(chunks):
-        embedding = GPT.toolkit.embedding(chunk.encode(encoding='ASCII', errors='ignore').decode())
+        embedding = GPT.gpt_tools.embedding(chunk.encode(encoding='ASCII', errors='ignore').decode())
         info = {'content': chunk, 'vector': embedding}
         print(info, '\n\n\n')

@@ -38,7 +38,7 @@ def run(query, model, prompt_file, isQuestion, params, info_file=None):
 def run(query, model, prompt_file, isQuestion, params, info_file=None):
     if isQuestion:
         data = util.read_json(INFO.BRAIN_DATA)
-        results = GPT.toolkit.search_chunks(query, data, params.chunk_count)
+        results = GPT.gpt_tools.search_chunks(query, data, params.chunk_count)
         answers = []
         for result in results:
             my_info = util.read_file(info_file)
@@ -47,15 +47,15 @@ def run(query, model, prompt_file, isQuestion, params, info_file=None):
             prompt = prompt.replace('<<QS>>', query)
             prompt = prompt.replace('<<MY-INFO>>', my_info)

-            answer = GPT.toolkit.gpt3(prompt, model, params)
+            answer = GPT.gpt_tools.gpt3(prompt, model, params)
             answers.append(answer)
         all_response = '\n\n'.join(answers)
     else:
         chunks = textwrap.wrap(query, 10000)
         responses = []
         for chunk in chunks:
             prompt = util.read_file(prompt_file).replace('<<DATA>>', chunk)
-            response = GPT.toolkit.gpt3(prompt, model, params)
+            response = GPT.gpt_tools.gpt3(prompt, model, params)
             responses.append(response)
         all_response = '\n\n'.join(responses)
     return all_response
@@ -65,7 +65,7 @@ def get_stream_prompt(query, prompt_file, isQuestion, info_file=None):
     openai.api_key = API_KEY
     if isQuestion:
         data = util.read_json(INFO.BRAIN_DATA)
-        result = GPT.toolkit.search_chunks(query, data, count=1)
+        result = GPT.gpt_tools.search_chunks(query, data, count=1)
         my_info = util.read_file(info_file)
         prompt = util.read_file(prompt_file)
         prompt = prompt.replace('<<INFO>>', result[0]['content'])
@@ -79,5 +79,5 @@ def get_stream_prompt(query, prompt_file, isQuestion, info_file=None):

 def run_stream(query, model, prompt_file, isQuestion, params, info_file=None):
     prompt = get_stream_prompt(query, prompt_file, isQuestion, info_file)
-    client = GPT.toolkit.gpt3_stream(API_KEY, prompt, model, params)
+    client = GPT.gpt_tools.gpt3_stream(API_KEY, prompt, model, params)
     return client
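
For reference, the renamed GPT/gpt_tools.py itself is not shown in this diff (the file was renamed without content changes). The sketch below is inferred purely from the call sites in query.py above, so the signatures and docstrings are assumptions rather than the module's actual code:

# GPT/gpt_tools.py -- interface sketch inferred from the call sites above; bodies omitted
def embedding(text):
    """Return an embedding vector for `text` (presumably via an embeddings API)."""
    ...

def search_chunks(query, data, count=1):
    """Return the `count` chunks in `data` most relevant to `query`,
    each as a dict with at least a 'content' key."""
    ...

def gpt3(prompt, model, params):
    """Run a completion for `prompt` with the given model and params; return the text."""
    ...

def gpt3_stream(api_key, prompt, model, params):
    """Like gpt3(), but return a streaming response object."""
    ...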
