De-coupled Tools for Marketplace #17

Open
wants to merge 69 commits into base: main
Changes from 1 commit (69 commits total)
e82d3f3
adding google search toolkit
AdityaSharma13064 Sep 7, 2023
ceeb683
deleting google toolkit
AdityaSharma13064 Sep 7, 2023
61be8e7
adding google search toolkit
AdityaSharma13064 Sep 7, 2023
67d5aa0
adding requirements.txt
AdityaSharma13064 Sep 7, 2023
66d2ff3
remove extra file
AdityaSharma13064 Sep 7, 2023
3bc61e3
fixing
AdityaSharma13064 Sep 7, 2023
81868d8
adding config.yaml
AdityaSharma13064 Sep 7, 2023
016c0b2
adding apollo toolkit
AdityaSharma13064 Sep 11, 2023
af79d87
removing apollo
AdityaSharma13064 Sep 11, 2023
28d7ef5
adding apollo toolkit
AdityaSharma13064 Sep 11, 2023
64ecbe3
adding google serp search toolkit
AdityaSharma13064 Sep 11, 2023
248acef
adding github toolkit
AdityaSharma13064 Sep 12, 2023
e8e50e1
adding searx toolkit
AdityaSharma13064 Sep 13, 2023
ddd9619
adding slack toolkit
AdityaSharma13064 Sep 13, 2023
516e3b2
adding more tools
AdityaSharma13064 Sep 13, 2023
1aeb17b
adding test for google serp and google search toolkit
AdityaSharma13064 Sep 13, 2023
56e0182
adding unit tests
AdityaSharma13064 Sep 14, 2023
c879b68
adding slack unit test
AdityaSharma13064 Sep 14, 2023
66e2088
removing bugs in github file
Sep 15, 2023
990ae6c
removing bugs
Sep 15, 2023
69c77cc
addiing file path of jira toolkit
Sep 16, 2023
6caaa91
fix
Sep 16, 2023
e07bbc8
fixes
Sep 16, 2023
79bd347
fixing-2
Sep 16, 2023
e640d15
adding webscaper toolkit
Sep 17, 2023
5e959ab
adding helper for webscaper
Sep 18, 2023
92262e0
fixing
Sep 18, 2023
4e0a6bb
fixing jira
Sep 18, 2023
f42dd3a
fixing jira
Sep 18, 2023
7a5a48e
adding fixes
Sep 18, 2023
09d3c16
fixing webscaper
Sep 18, 2023
a72c28d
adding helper for toolkits
Sep 18, 2023
ea93559
adding assets for file toolkit
Sep 19, 2023
ed519ea
fixing base_file
Sep 20, 2023
23a8f23
adding apollo search kit
Sep 20, 2023
05517b8
adding resource helper
Sep 20, 2023
8dc3742
bug fix
Sep 20, 2023
2531e7a
bugfix
Sep 20, 2023
93ce083
bug fixing
Sep 20, 2023
69af4bd
adding bug fix
Sep 20, 2023
4407219
adding fix
Sep 20, 2023
8b7b09e
adding fix
Sep 20, 2023
918ddac
adding fix
Sep 20, 2023
b6576ad
adding fix
Sep 20, 2023
70d3424
adding fix
Sep 20, 2023
ee70ef2
adding fix
Sep 20, 2023
40a286a
adding fix
Sep 20, 2023
0e088f6
adding fix
Sep 20, 2023
8fbc95d
adding fix
Sep 20, 2023
f9bc3b1
adding fix
Sep 20, 2023
e64b1d1
adding apollo search kit
Sep 21, 2023
2b3464f
adding apollo search kit
Sep 21, 2023
5647733
adding assets
Sep 22, 2023
4632f6e
adding temp file
Sep 23, 2023
ff09432
adding email toolkit
Sep 25, 2023
0407d6a
adding logs
Sep 25, 2023
b1b320b
bug resolve
Sep 25, 2023
5bbe373
adding complete path
Sep 25, 2023
4b7b84e
adding changes
Sep 26, 2023
cf8c2c9
adding changes
Sep 26, 2023
c4625a1
adding instagram toolkit
Sep 26, 2023
2e9c560
adding changes
Sep 26, 2023
16056cd
adding more tools
Sep 28, 2023
7add4eb
adding requirements for instragram
Sep 28, 2023
090c6ca
removing basetool
Sep 28, 2023
b7c9d8d
adding coding toolkit changes
Sep 28, 2023
59b31ff
adding correct imports
Sep 28, 2023
2ca6334
adding final changes
Sep 28, 2023
e7b3c50
adding assets folder
Sep 28, 2023
adding coding toolkit changes
Aditya Sharma authored and committed Sep 28, 2023
commit b7c9d8db0a9262639aa02dd2df59fa6d2c361100
8 changes: 4 additions & 4 deletions code/coding_toolkit.py
@@ -2,10 +2,10 @@
from typing import List

from superagi.tools.base_tool import BaseToolkit, BaseTool, ToolConfiguration
-from superagi.tools.code.improve_code import ImproveCodeTool
-from superagi.tools.code.write_code import CodingTool
-from superagi.tools.code.write_spec import WriteSpecTool
-from superagi.tools.code.write_test import WriteTestTool
+from improve_code import ImproveCodeTool
+from write_code import CodingTool
+from write_spec import WriteSpecTool
+from write_test import WriteTestTool


class CodingToolkit(BaseToolkit, ABC):
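For context, only the imports change in this file: the four coding tools now resolve from the local package instead of the superagi tree. The sketch below illustrates what such a de-coupled toolkit can look like, assuming the BaseToolkit interface from superagi-tools exposes get_tools() and get_env_keys(); the class name, field values, and method bodies are illustrative assumptions, not contents of this diff.

from abc import ABC
from typing import List

from superagi.tools.base_tool import BaseToolkit, BaseTool
from improve_code import ImproveCodeTool
from write_code import CodingTool
from write_spec import WriteSpecTool
from write_test import WriteTestTool


class CodingToolkitSketch(BaseToolkit, ABC):
    # Illustrative sketch only; the real CodingToolkit body is not shown in this commit.
    name: str = "Coding Toolkit"
    description: str = "Toolkit for writing specs, code, and tests, and for improving code"

    def get_tools(self) -> List[BaseTool]:
        # The tools are imported from the local modules, which is the point of the
        # de-coupling change above.
        return [WriteSpecTool(), CodingTool(), WriteTestTool(), ImproveCodeTool()]

    def get_env_keys(self) -> List[str]:
        # No toolkit-specific environment keys are assumed in this sketch.
        return []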
27 changes: 27 additions & 0 deletions code/helper/prompt_reader.py
@@ -0,0 +1,27 @@
from pathlib import Path


class PromptReader:
    @staticmethod
    def read_tools_prompt(current_file: str, prompt_file: str) -> str:
        file_path = str(Path(current_file).resolve().parent) + "/prompts/" + prompt_file
        try:
            f = open(file_path, "r")
            file_content = f.read()
            f.close()
        except FileNotFoundError as e:
            print(e.__str__())
            raise e
        return file_content

    @staticmethod
    def read_agent_prompt(current_file: str, prompt_file: str) -> str:
        file_path = str(Path(current_file).resolve().parent) + "/prompts/" + prompt_file
        try:
            f = open(file_path, "r")
            file_content = f.read()
            f.close()
        except FileNotFoundError as e:
            print(e.__str__())
            raise e
        return file_content
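A tool in this package would typically pass its own __file__ and a prompt filename to this helper. The snippet below is a usage sketch only; the prompts/write_code.txt filename is a placeholder, not a file added in this commit.

from helper.prompt_reader import PromptReader

# Reads <directory of this file>/prompts/write_code.txt and returns its text;
# a FileNotFoundError is printed and re-raised if the prompt file is missing.
prompt_template = PromptReader.read_tools_prompt(__file__, "write_code.txt")
print(prompt_template[:100])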
98 changes: 98 additions & 0 deletions code/helper/token_counter.py
@@ -0,0 +1,98 @@
from typing import List

import tiktoken

from superagi.types.common import BaseMessage
from superagi.lib.logger import logger
from superagi.models.models import Models
from sqlalchemy.orm import Session


class TokenCounter:

    def __init__(self, session: Session = None, organisation_id: int = None):
        self.session = session
        self.organisation_id = organisation_id

    def token_limit(self, model: str = "gpt-3.5-turbo-0301") -> int:
        """
        Function to return the token limit for a given model.

        Args:
            model (str): The model to return the token limit for.

        Raises:
            KeyError: If the model is not found.

        Returns:
            int: The token limit.
        """
        try:
            model_token_limit_dict = (Models.fetch_model_tokens(self.session, self.organisation_id))
            return model_token_limit_dict[model]
        except KeyError:
            logger.warning("Warning: model not found. Using cl100k_base encoding.")
            return 8092

    @staticmethod
    def count_message_tokens(messages: List[BaseMessage], model: str = "gpt-3.5-turbo-0301") -> int:
        """
        Function to count the number of tokens in a list of messages.

        Args:
            messages (List[BaseMessage]): The list of messages to count the tokens for.
            model (str): The model to count the tokens for.

        Raises:
            KeyError: If the model is not found.

        Returns:
            int: The number of tokens in the messages.
        """
        try:
            default_tokens_per_message = 4
            model_token_per_message_dict = {"gpt-3.5-turbo-0301": 4, "gpt-4-0314": 3, "gpt-3.5-turbo": 4, "gpt-4": 3,
                                            "gpt-3.5-turbo-16k": 4, "gpt-4-32k": 3, "gpt-4-32k-0314": 3,
                                            "models/chat-bison-001": 4}
            encoding = tiktoken.encoding_for_model(model)
        except KeyError:
            logger.warning("Warning: model not found. Using cl100k_base encoding.")
            encoding = tiktoken.get_encoding("cl100k_base")

        if model in model_token_per_message_dict.keys():
            tokens_per_message = model_token_per_message_dict[model]
        else:
            tokens_per_message = default_tokens_per_message

        if tokens_per_message is None:
            raise NotImplementedError(
                f"num_tokens_from_messages() is not implemented for model {model}.\n"
                " See https://github.com/openai/openai-python/blob/main/chatml.md for"
                " information on how messages are converted to tokens."
            )

        num_tokens = 0
        for message in messages:
            if isinstance(message, str):
                message = {'content': message}
            num_tokens += tokens_per_message
            num_tokens += len(encoding.encode(message['content']))

        num_tokens += 3
        print("tokens", num_tokens)
        return num_tokens

    @staticmethod
    def count_text_tokens(message: str) -> int:
        """
        Function to count the number of tokens in a text.

        Args:
            message (str): The text to count the tokens for.

        Returns:
            int: The number of tokens in the text.
        """
        encoding = tiktoken.get_encoding("cl100k_base")
        num_tokens = len(encoding.encode(message)) + 4
        return num_tokens
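Because count_message_tokens converts a plain string into {'content': message}, it can be exercised without constructing BaseMessage objects. The snippet below is a minimal usage sketch assuming tiktoken is installed; the static helpers need no database Session, which is only required for the instance-level token_limit lookup.

from helper.token_counter import TokenCounter

messages = ["You are a coding assistant.", "Write unit tests for the parser."]

# Per-message overhead (tokens_per_message) plus encoded content, plus 3 trailing tokens.
total_tokens = TokenCounter.count_message_tokens(messages, model="gpt-3.5-turbo")

# Single text: cl100k_base encoding length plus a fixed overhead of 4.
text_tokens = TokenCounter.count_text_tokens("Write unit tests for the parser.")

print(total_tokens, text_tokens)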
6 changes: 3 additions & 3 deletions code/improve_code.py
@@ -4,11 +4,11 @@
from pydantic import BaseModel, Field

from superagi.agent.agent_prompt_builder import AgentPromptBuilder
-from superagi.helper.prompt_reader import PromptReader
-from superagi.helper.token_counter import TokenCounter
+from helper.prompt_reader import PromptReader
+from helper.token_counter import TokenCounter
from superagi.lib.logger import logger
from superagi.llms.base_llm import BaseLlm
-from superagi.resource_manager.file_manager import FileManager
+from superagi.tools.base_tool import FileManager
from superagi.tools.base_tool import BaseTool
from superagi.tools.tool_response_query_manager import ToolResponseQueryManager

11 changes: 11 additions & 0 deletions code/requirements.txt
@@ -0,0 +1,11 @@
pydantic==1.10.8
pytest==7.4.0
pytest-mock==3.11.1
requests==2.31.0
superagi-tools==1.0.6
tiktoken==0.4.0
typing_extensions==4.7.1
urllib3==1.26.16
filelock==3.12.0
requests-file==1.5.1
watchfiles==0.19.0
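These pinned dependencies can presumably be installed into the toolkit's environment with pip install -r code/requirements.txt before running the unit tests.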
6 changes: 3 additions & 3 deletions code/write_code.py
@@ -4,11 +4,11 @@
from pydantic import BaseModel, Field

from superagi.agent.agent_prompt_builder import AgentPromptBuilder
-from superagi.helper.prompt_reader import PromptReader
-from superagi.helper.token_counter import TokenCounter
+from helper.prompt_reader import PromptReader
+from helper.token_counter import TokenCounter
from superagi.lib.logger import logger
from superagi.llms.base_llm import BaseLlm
-from superagi.resource_manager.file_manager import FileManager
+from superagi.tools.base_tool import FileManager
from superagi.tools.base_tool import BaseTool
from superagi.tools.tool_response_query_manager import ToolResponseQueryManager
from superagi.models.agent import Agent
6 changes: 3 additions & 3 deletions code/write_spec.py
@@ -3,11 +3,11 @@
from pydantic import BaseModel, Field

from superagi.agent.agent_prompt_builder import AgentPromptBuilder
-from superagi.helper.prompt_reader import PromptReader
-from superagi.helper.token_counter import TokenCounter
+from helper.prompt_reader import PromptReader
+from helper.token_counter import TokenCounter
from superagi.lib.logger import logger
from superagi.llms.base_llm import BaseLlm
-from superagi.resource_manager.file_manager import FileManager
+from superagi.tools.base_tool import FileManager
from superagi.tools.base_tool import BaseTool
from superagi.models.agent import Agent

9 changes: 5 additions & 4 deletions code/write_test.py
@@ -2,17 +2,18 @@
from typing import Type, Optional, List

from pydantic import BaseModel, Field

from superagi.agent.agent_prompt_builder import AgentPromptBuilder
-from superagi.helper.prompt_reader import PromptReader
-from superagi.helper.token_counter import TokenCounter
from superagi.lib.logger import logger
from superagi.llms.base_llm import BaseLlm
-from superagi.resource_manager.file_manager import FileManager
from superagi.tools.base_tool import BaseTool
from superagi.tools.tool_response_query_manager import ToolResponseQueryManager
from superagi.models.agent import Agent
-
+from superagi.tools.base_tool import FileManager
+
+from helper.prompt_reader import PromptReader
+from helper.token_counter import TokenCounter
+
class WriteTestSchema(BaseModel):
    test_description: str = Field(
        ...,