
Commit

Merge branch 'microsoft:main' into arjun-g-dev-tool-config
luxzoli authored Sep 25, 2024
2 parents b2971e9 + 2e1f788 commit 64af078
Showing 38 changed files with 1,138 additions and 253 deletions.
14 changes: 14 additions & 0 deletions .github/workflows/contrib-tests.yml
@@ -91,6 +91,17 @@ jobs:
image: mongodb/mongodb-atlas-local:latest
ports:
- 27017:27017
couchbase:
image: couchbase:enterprise-7.6.3
ports:
- "8091-8095:8091-8095"
- "11210:11210"
- "9102:9102"
healthcheck: # check that the Couchbase server is up
test: ["CMD", "curl", "-v", "http://localhost:8091/pools"]
interval: 20s
timeout: 20s
retries: 5
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
@@ -111,6 +122,9 @@ jobs:
- name: Install mongodb when on linux
run: |
pip install -e .[retrievechat-mongodb]
- name: Install couchbase when on linux
run: |
pip install -e .[retrievechat-couchbase]
- name: Install unstructured when python-version is 3.9 and on linux
if: matrix.python-version == '3.9'
run: |
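For local runs of these contrib tests, the readiness condition encoded by the workflow's healthcheck can be reproduced with a short Python sketch. The helper below is illustrative only and not part of the repository; it polls the same http://localhost:8091/pools endpoint that the curl healthcheck uses, with the same retry and interval budget.

import time
import urllib.error
import urllib.request

COUCHBASE_POOLS_URL = "http://localhost:8091/pools"  # endpoint hit by the workflow's curl healthcheck


def wait_for_couchbase(url: str = COUCHBASE_POOLS_URL, retries: int = 5, interval: float = 20.0) -> bool:
    """Poll the Couchbase REST endpoint until it responds, mirroring the CI healthcheck."""
    for _ in range(retries):
        try:
            with urllib.request.urlopen(url, timeout=20) as response:
                if response.status == 200:
                    return True
        except (urllib.error.URLError, OSError):
            pass  # server not accepting connections yet
        time.sleep(interval)
    return False


if __name__ == "__main__":
    print("Couchbase ready:", wait_for_couchbase())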
Empty file.
26 changes: 25 additions & 1 deletion autogen/agentchat/contrib/gpt_assistant_agent.py
@@ -209,10 +209,12 @@ def _invoke_assistant(
for message in pending_messages:
if message["content"].strip() == "":
continue
# Map the message role via _map_role_for_api to a role the OpenAI Assistant API accepts ('user', 'assistant', or 'system')
api_role = self._map_role_for_api(message["role"])
self._openai_client.beta.threads.messages.create(
thread_id=assistant_thread.id,
content=message["content"],
role=message["role"],
role=api_role,
)

# Create a new run to get responses from the assistant
@@ -240,6 +242,28 @@ def _invoke_assistant(
self._unread_index[sender] = len(self._oai_messages[sender]) + 1
return True, response

def _map_role_for_api(self, role: str) -> str:
"""
Maps internal message roles to the roles expected by the OpenAI Assistant API.
Args:
role (str): The role from the internal message.
Returns:
str: The mapped role suitable for the API.
"""
if role in ["function", "tool"]:
return "assistant"
elif role == "system":
return "system"
elif role == "user":
return "user"
elif role == "assistant":
return "assistant"
else:
# Default to 'assistant' for any other roles not recognized by the API
return "assistant"

def _get_run_response(self, thread, run):
"""
Waits for and processes the response of a run from the OpenAI assistant.
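The effect of the new role mapping can be summarized with a standalone check. The function below copies the rules from _map_role_for_api for illustration only; it is not part of the commit and does not instantiate GPTAssistantAgent. Internal roles such as 'function' and 'tool' are now mapped to 'assistant' before the message is sent to the thread.

def map_role_for_api(role: str) -> str:
    # Standalone copy of the rules added in _map_role_for_api, for illustration.
    if role in ("function", "tool"):
        return "assistant"
    if role in ("system", "user", "assistant"):
        return role
    return "assistant"  # any unrecognized role falls back to 'assistant'


assert map_role_for_api("tool") == "assistant"
assert map_role_for_api("function") == "assistant"
assert map_role_for_api("user") == "user"
assert map_role_for_api("system") == "system"
assert map_role_for_api("custom_role") == "assistant"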
2 changes: 1 addition & 1 deletion autogen/agentchat/contrib/retrieve_user_proxy_agent.py
@@ -178,7 +178,7 @@ def __init__(
vector db. Default is None, SentenceTransformer with the given `embedding_model`
will be used. If you want to use OpenAI, Cohere, HuggingFace or other embedding
functions, you can pass it here,
- follow the examples in `https://docs.trychroma.com/embeddings`.
+ follow the examples in `https://docs.trychroma.com/guides/embeddings`.
- `customized_prompt` (Optional, str) - the customized prompt for the retrieve chat.
Default is None.
- `customized_answer_prefix` (Optional, str) - the customized answer prefix for the
6 changes: 5 additions & 1 deletion autogen/agentchat/contrib/vectordb/base.py
@@ -201,7 +201,7 @@ class VectorDBFactory:
Factory class for creating vector databases.
"""

- PREDEFINED_VECTOR_DB = ["chroma", "pgvector", "mongodb", "qdrant"]
+ PREDEFINED_VECTOR_DB = ["chroma", "pgvector", "mongodb", "qdrant", "couchbase"]

@staticmethod
def create_vector_db(db_type: str, **kwargs) -> VectorDB:
@@ -231,6 +231,10 @@ def create_vector_db(db_type: str, **kwargs) -> VectorDB:
from .qdrant import QdrantVectorDB

return QdrantVectorDB(**kwargs)
if db_type.lower() in ["couchbase", "couchbasedb", "capella"]:
from .couchbase import CouchbaseVectorDB

return CouchbaseVectorDB(**kwargs)
else:
raise ValueError(
f"Unsupported vector database type: {db_type}. Valid types are {VectorDBFactory.PREDEFINED_VECTOR_DB}."
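With the registration above, a Couchbase-backed store can be requested from the factory by name. A minimal sketch follows; the keyword arguments are assumptions for illustration, since the parameters actually accepted are defined by CouchbaseVectorDB in the new couchbase module, which this diff does not show.

from autogen.agentchat.contrib.vectordb.base import VectorDBFactory

# "couchbase", "couchbasedb", and "capella" all resolve to CouchbaseVectorDB.
vector_db = VectorDBFactory.create_vector_db(
    db_type="couchbase",
    connection_string="couchbase://localhost",  # assumed parameter name
    username="Administrator",                   # assumed parameter name
    password="password",                        # assumed parameter name
)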
7 changes: 6 additions & 1 deletion autogen/agentchat/contrib/vectordb/chromadb.py
@@ -14,6 +14,11 @@
except ImportError:
raise ImportError("Please install chromadb: `pip install chromadb`")

try:
from chromadb.errors import ChromaError
except ImportError:
ChromaError = Exception

CHROMADB_MAX_BATCH_SIZE = os.environ.get("CHROMADB_MAX_BATCH_SIZE", 40000)
logger = get_logger(__name__)

@@ -84,7 +89,7 @@ def create_collection(
collection = self.active_collection
else:
collection = self.client.get_collection(collection_name, embedding_function=self.embedding_function)
- except ValueError:
+ except (ValueError, ChromaError):
collection = None
if collection is None:
return self.client.create_collection(
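The ChromaError = Exception fallback keeps the module importable on chromadb builds without chromadb.errors, while letting create_collection treat a missing collection the same way whether the client raises ValueError or a ChromaError subclass (which of the two is raised appears to depend on the chromadb version). A generic sketch of the pattern, with an illustrative helper name, looks like this:

try:
    from chromadb.errors import ChromaError  # available in newer chromadb releases
except ImportError:
    ChromaError = Exception  # fallback so the except clause below still works

def get_collection_or_none(client, name, embedding_function=None):
    """Return an existing collection, or None if the client reports it as missing."""
    try:
        return client.get_collection(name, embedding_function=embedding_function)
    except (ValueError, ChromaError):  # missing collections may surface as either, depending on chromadb version
        return None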
