Merge pull request #18 from jhakulin/main
Authentication updates, bug fixes
jhakulin authored May 11, 2024
2 parents 9617aa7 + eea4a6b commit 0bce3ff
Showing 24 changed files with 470 additions and 254 deletions.
22 changes: 11 additions & 11 deletions README.md
@@ -66,7 +66,7 @@

### Step 2: Install Python

Azure AI Assistant tool requires Python >= 3.8 on your system. You can download Python from [the official website](https://www.python.org/downloads/).
The Azure AI Assistant tool requires Python >= 3.8 on your system. You can download Python from [the official website](https://www.python.org/downloads/).
The latest stable Python version is recommended.

Create a new Python virtual environment. A virtual environment is recommended for installing the SDK packages safely:
@@ -81,7 +81,15 @@
.venv\scripts\activate
```

### Step 3: Build and install azure.ai.assistant Python library
### Step 3: Install the tool and dependencies

Run the following command in your terminal to install all the necessary dependencies as specified in the requirements.txt file.

```
pip install -r requirements.txt
```

### Step 4: Build and install azure.ai.assistant Python library (Optional)

We currently include the `azure.ai.assistant` library within this repository as part of the project's alpha status.
The plan is to release the library on PyPI for more convenient installation in the future.
@@ -92,17 +100,9 @@ Build the wheel for the `azure.ai.assistant` library using the following instructions:
- Go to the `sdk/azure-ai-assistant` folder
- Build the wheel using the following command: `python setup.py sdist bdist_wheel`
- Go to the generated `dist` folder
- Install the generated wheel using the following command: `pip install --force-reinstall azure_ai_assistant-0.3.1a1-py3-none-any.whl`
- Install the generated wheel using the following command: `pip install --force-reinstall azure_ai_assistant-0.x.xa1-py3-none-any.whl`
- This installation will pull in the necessary dependencies for the library (openai, python-Levenshtein, fuzzywuzzy, Pillow, requests)

### Step 4: Install Python UI libraries

Install the following libraries needed for the Azure AI Assistant Tool GUI:

```
pip install PySide6
pip install azure-cognitiveservices-speech
```

### Step 5: Find and copy your Azure OpenAI Service API key, endpoint, and model deployment version
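The tool reads these values from environment variables. As an illustration of this step (not part of this commit), a minimal sketch that checks the variables referenced elsewhere in this changeset; `AZURE_OPENAI_API_KEY`, `AZURE_OPENAI_ENDPOINT`, and `AZURE_OPENAI_API_VERSION` come from the client factory code below, and the key is only required for key-based authentication.

```python
# Editor-added sketch: quick sanity check of the Azure OpenAI environment
# variables referenced by this changeset. The API key is only needed for
# key-based authentication; keyless (Entra ID) auth works without it.
import os

endpoint = os.getenv("AZURE_OPENAI_ENDPOINT")
api_key = os.getenv("AZURE_OPENAI_API_KEY")
api_version = os.getenv("AZURE_OPENAI_API_VERSION", "2024-02-15-preview")

print(f"Endpoint set: {bool(endpoint)}")
print(f"API key set:  {bool(api_key)} (optional with keyless auth)")
print(f"API version:  {api_version}")
```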

2 changes: 1 addition & 1 deletion gui/assistant_dialogs.py
@@ -5,7 +5,7 @@
# For more details on PySide6's license, see <https://www.qt.io/licensing>

from PySide6 import QtGui
from PySide6.QtWidgets import QDialog, QGroupBox, QSplitter, QComboBox, QSpinBox, QListWidgetItem, QTabWidget, QSizePolicy, QScrollArea, QHBoxLayout, QWidget, QFileDialog, QListWidget, QLineEdit, QVBoxLayout, QPushButton, QLabel, QCheckBox, QTextEdit, QMessageBox, QSlider
from PySide6.QtWidgets import QDialog, QGroupBox, QSplitter, QComboBox, QSpinBox, QListWidgetItem, QTabWidget, QSizePolicy, QHBoxLayout, QWidget, QFileDialog, QListWidget, QLineEdit, QVBoxLayout, QPushButton, QLabel, QCheckBox, QTextEdit, QMessageBox, QSlider
from PySide6.QtCore import Qt, QSize, Signal
from PySide6.QtGui import QIcon, QTextOption

5 changes: 3 additions & 2 deletions gui/conversation_sidebar.py
@@ -4,7 +4,7 @@
# This software uses the PySide6 library, which is licensed under the GNU Lesser General Public License (LGPL).
# For more details on PySide6's license, see <https://www.qt.io/licensing>

from PySide6.QtWidgets import QWidget, QCheckBox, QLabel, QInputDialog, QComboBox, QListWidgetItem, QFileDialog, QVBoxLayout, QSizePolicy, QHBoxLayout, QPushButton, QListWidget, QMessageBox, QMenu
from PySide6.QtWidgets import QWidget, QCheckBox, QLabel, QComboBox, QListWidgetItem, QFileDialog, QVBoxLayout, QSizePolicy, QHBoxLayout, QPushButton, QListWidget, QMessageBox, QMenu
from PySide6.QtCore import Qt, Signal, QSize
from PySide6.QtGui import QFont, QIcon, QAction

@@ -410,6 +410,7 @@ def load_assistant_list(self, ai_client_type : AIClientType):
for name in assistant_names:
if not self.assistant_client_manager.get_client(name):
assistant_config : AssistantConfig = self.assistant_config_manager.get_config(name)
assistant_config.config_folder = "config"
if assistant_config.assistant_type == "assistant":
assistant_client = AssistantClient.from_json(assistant_config.to_json(), self.main_window, self.main_window.connection_timeout)
else:
@@ -434,7 +435,7 @@ def on_ai_client_type_changed(self, index):
self.threadList.clear_files()

# Get the threads for the selected AI client type
threads_client = ConversationThreadClient.get_instance(self._ai_client_type)
threads_client = ConversationThreadClient.get_instance(self._ai_client_type, config_folder='config')
threads = threads_client.get_conversation_threads()
self.threadList.load_threads_with_attachments(threads)
except Exception as e:
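To illustrate the new `config_folder` argument (not part of the diff), a minimal sketch of loading conversation threads outside the GUI. The calls `get_instance(..., config_folder=...)` and `get_conversation_threads()` are the ones used in the sidebar above; the synchronous import paths are assumed by analogy with the async modules imported in the PetTravelPlanChatAssistant sample further down.

```python
# Editor-added sketch: load conversation threads from the tool's "config" folder.
# The import paths are assumptions based on the async imports shown in the sample.
from azure.ai.assistant.management.ai_client_factory import AIClientType
from azure.ai.assistant.management.conversation_thread_client import ConversationThreadClient

threads_client = ConversationThreadClient.get_instance(
    AIClientType.AZURE_OPEN_AI, config_folder="config"
)
for thread in threads_client.get_conversation_threads():
    print(thread)
```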
9 changes: 6 additions & 3 deletions gui/main_window.py
@@ -51,8 +51,8 @@ def __init__(self):
QTimer.singleShot(100, lambda: self.deferred_init())

def initialize_singletons(self):
self.function_config_manager = FunctionConfigManager.get_instance()
self.assistant_config_manager = AssistantConfigManager.get_instance()
self.function_config_manager = FunctionConfigManager.get_instance('config')
self.assistant_config_manager = AssistantConfigManager.get_instance('config')
self.task_manager = TaskManager.get_instance(self)
self.assistant_client_manager = AssistantClientManager.get_instance()

@@ -83,7 +83,7 @@ def initialize_variables(self):
self.conversation_thread_clients : dict[AIClientType, ConversationThreadClient] = {}
for ai_client_type in AIClientType:
try:
self.conversation_thread_clients[ai_client_type] = ConversationThreadClient.get_instance(ai_client_type)
self.conversation_thread_clients[ai_client_type] = ConversationThreadClient.get_instance(ai_client_type, config_folder='config')
except Exception as e:
self.conversation_thread_clients[ai_client_type] = None
logger.error(f"Error initializing conversation thread client for ai_client_type {ai_client_type.name}: {e}")
@@ -229,6 +229,9 @@ def set_active_ai_client_type(self, ai_client_type : AIClientType):
if self.conversation_thread_clients[self.active_ai_client_type] is not None:
self.conversation_thread_clients[self.active_ai_client_type].save_conversation_threads()

# Save assistant configurations when switching AI client types
self.assistant_config_manager.save_configs()

self.conversation_view.conversationView.clear()
self.active_ai_client_type = ai_client_type
client = None
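A sketch (not part of the diff) of the configuration-folder pattern the main window now uses: both config manager singletons are created from the named `config` folder, and assistant configurations are persisted when the active client type changes. Only `get_instance('config')` and `save_configs()` appear in the diff itself; the module paths below are assumptions based on the package layout implied elsewhere in this PR.

```python
# Editor-added sketch: create the config manager singletons from a named folder
# and persist assistant configurations, mirroring the calls added above.
# Import paths are assumed; they are not shown in this changeset.
from azure.ai.assistant.management.assistant_config_manager import AssistantConfigManager
from azure.ai.assistant.management.function_config_manager import FunctionConfigManager

assistant_config_manager = AssistantConfigManager.get_instance("config")
function_config_manager = FunctionConfigManager.get_instance("config")

# ... after assistants have been edited, save their configurations,
# as the GUI now does when the active AI client type changes:
assistant_config_manager.save_configs()
```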
8 changes: 8 additions & 0 deletions requirements.txt
@@ -0,0 +1,8 @@
# Assistant middleware from GitHub Release
https://github.com/Azure-Samples/azureai-assistant-tool/releases/download/v0.3.7-alpha/azure_ai_assistant-0.3.7a1-py3-none-any.whl

# GUI Framework
PySide6

# Azure Cognitive Services Speech SDK
azure-cognitiveservices-speech
@@ -9,7 +9,7 @@ instructions: |-
2. You only ask the same question more than once if you do not get a satisfactory answer. The document form you have either lists possible answer options or accepts free-form answers.
3. To know which question to ask next from the form, you will always analyze the earlier questions and answers in the conversation.
4. After all questions from the form have been answered successfully, you will provide the user with a summary that contains all questions and answers.
model: gpt-3.5-turbo-0125
model: gpt-4-32k
assistant_id: 770c45f7-fd9e-4364-a3ab-ef4d5d861b4c
file_references:
- C:/Git/azureai-assistant-tool/samples/PetTravelPlanChatAssistant/FormTemplateForPetTransportation.yaml
@@ -71,7 +71,7 @@ functions:
file_search: false
code_interpreter: false
output_folder_path: output
ai_client_type: OPEN_AI
ai_client_type: AZURE_OPEN_AI
assistant_type: chat_assistant
completion_settings: null
assistant_role: user
@@ -7,9 +7,6 @@
import platform
import random
import re
import sounddevice as sd
import yfinance as yf


# This file is auto-generated. Do not edit directly.

47 changes: 43 additions & 4 deletions samples/PetTravelPlanChatAssistant/main.py
@@ -1,12 +1,14 @@
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE.md file in the project root for full license information.

import asyncio
from azure.ai.assistant.management.async_chat_assistant_client import AsyncChatAssistantClient
from azure.ai.assistant.management.ai_client_factory import AsyncAIClientType
from azure.ai.assistant.management.async_assistant_client_callbacks import AsyncAssistantClientCallbacks
from azure.ai.assistant.management.async_conversation_thread_client import AsyncConversationThreadClient

import os, asyncio, yaml
import azure.identity.aio


# Define a custom callback class that inherits from AssistantClientCallbacks
class MyAssistantClientCallbacks(AsyncAssistantClientCallbacks):
@@ -19,6 +21,9 @@ async def handle_message(self, action, message=""):
async def on_run_update(self, assistant_name, run_identifier, run_status, thread_name, is_first_message=False, message=None):
if run_status == "streaming":
await self.handle_message("start" if is_first_message else "message", message)

async def on_run_end(self, assistant_name, run_identifier, run_end_time, thread_name, response=None):
pass

async def on_function_call_processed(self, assistant_name, run_identifier, function_name, arguments, response):
await self.handle_message("function", function_name)
@@ -40,26 +45,60 @@ async def display_streamed_messages(message_queue, assistant_name):
message_queue.task_done()


# Define a helper that builds the AI client arguments based on the client type
async def get_client_args(ai_client_type : str):
try:
client_args = {}
if ai_client_type == "AZURE_OPEN_AI":
if os.getenv("AZURE_OPENAI_API_KEY"):
# Authenticate using an Azure OpenAI API key
# This is generally discouraged, but is provided for developers
# that want to develop locally inside the Docker container.
print("Using Azure OpenAI with key")
client_args["api_key"] = os.getenv("AZURE_OPENAI_API_KEY")
else:
# Authenticate using the default Azure credential chain
# See https://docs.microsoft.com/azure/developer/python/azure-sdk-authenticate#defaultazurecredential
# This will *not* work inside a Docker container.
print("Using Azure OpenAI with default credential")
default_credential = azure.identity.aio.DefaultAzureCredential(
exclude_shared_token_cache_credential=True
)
client_args["azure_ad_token_provider"] = azure.identity.aio.get_bearer_token_provider(
default_credential, "https://cognitiveservices.azure.com/.default"
)
elif ai_client_type == "OPEN_AI":
# Authenticate using an OpenAI API key
if os.getenv("OPENAI_API_KEY"):
print("Using OpenAI with key")
client_args["api_key"] = os.getenv("OPENAI_API_KEY")
else:
raise Exception("OpenAI API key not found.")
return client_args
except Exception as e:
print(f"An error occurred: {e}")
return {}

async def main():

assistant_name = "PetTravelPlanChatAssistant"
try:
with open(f"config/{assistant_name}_assistant_config.yaml", "r") as file:
config = file.read()
config = yaml.safe_load(file)
except FileNotFoundError:
print(f"Configuration file for {assistant_name} not found.")
return

try:
client_args = await get_client_args(config["ai_client_type"])
# Create a message queue to store streamed messages and a custom callback class
message_queue = asyncio.Queue()
callbacks = MyAssistantClientCallbacks(message_queue)

# Create an instance of the AsyncChatAssistantClient
assistant_client = await AsyncChatAssistantClient.from_yaml(config, callbacks=callbacks)
assistant_client = await AsyncChatAssistantClient.from_yaml(yaml.dump(config), callbacks=callbacks, **client_args)
ai_client_type = AsyncAIClientType[assistant_client.assistant_config.ai_client_type]

print(f"Starting chat with {assistant_client.ai_client} assistant.")
# Create an instance of the AsyncConversationThreadClient
conversation_thread_client = AsyncConversationThreadClient.get_instance(ai_client_type)

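The sample streams assistant output through an `asyncio.Queue`: `on_run_update` pushes message chunks and `display_streamed_messages` pops and prints them. Below is a standalone sketch (not part of the commit) of that producer/consumer pattern, using only the standard library; the chunk text and timings are illustrative.

```python
# Editor-added sketch of the queue-based streaming pattern used by the sample:
# a producer (standing in for on_run_update) pushes message chunks and a
# consumer (standing in for display_streamed_messages) prints them as they
# arrive, with None as an end-of-run sentinel.
import asyncio


async def produce(queue: asyncio.Queue) -> None:
    for chunk in ["Planning ", "your pet's ", "travel..."]:
        await queue.put(chunk)
        await asyncio.sleep(0.1)  # simulate streaming latency
    await queue.put(None)  # sentinel: the run has ended


async def consume(queue: asyncio.Queue) -> None:
    while True:
        chunk = await queue.get()
        if chunk is None:
            queue.task_done()
            break
        print(chunk, end="", flush=True)
        queue.task_done()
    print()


async def main() -> None:
    queue = asyncio.Queue()
    await asyncio.gather(produce(queue), consume(queue))


asyncio.run(main())
```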
2 changes: 1 addition & 1 deletion sdk/azure-ai-assistant/azure/ai/assistant/_version.py
@@ -6,4 +6,4 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

VERSION = "0.3.1a1"
VERSION = "0.3.7a1"
@@ -58,7 +58,8 @@ def get_instance(cls) -> "AIClientFactory":
def get_client(
self,
client_type: Union[AIClientType, AsyncAIClientType],
api_version: str = None
api_version: str = None,
**client_args
) -> Union[OpenAI, AzureOpenAI, AsyncOpenAI, AsyncAzureOpenAI]:
"""
Get an AI client, synchronous or asynchronous, based on the given type and API version.
@@ -67,45 +68,29 @@ def get_client(
:type client_type: Union[AIClientType, AsyncAIClientType]
:param api_version: The API version to use.
:type api_version: str
:param client_args: Additional keyword arguments for configuring the AI client.
:type client_args: Dict
:return: The AI client.
:rtype: Union[OpenAI, AzureOpenAI, AsyncOpenAI, AsyncAzureOpenAI]
"""
# Set the default API version or use environment override
api_version = os.getenv("AZURE_OPENAI_API_VERSION", api_version or "2024-02-15-preview")

api_version = os.getenv("AZURE_OPENAI_API_VERSION", api_version) or "2024-02-15-preview"
client_key = (client_type, api_version)
if client_key in self._clients:
return self._clients[client_key]

if isinstance(client_type, AIClientType):
if client_type == AIClientType.AZURE_OPEN_AI:
# Instantiate synchronous Azure OpenAI client
self._check_and_prepare_env_vars(["AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT"])
self._clients[client_key] = AzureOpenAI(api_version=api_version, azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"))
self._clients[client_key] = AzureOpenAI(api_version=api_version, azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"), **client_args)
elif client_type == AIClientType.OPEN_AI:
# Instantiate synchronous OpenAI client
self._check_and_prepare_env_vars(["OPENAI_API_KEY"])
self._clients[client_key] = OpenAI()
self._clients[client_key] = OpenAI(**client_args)

elif isinstance(client_type, AsyncAIClientType):
if client_type == AsyncAIClientType.AZURE_OPEN_AI:
# Instantiate asynchronous Azure OpenAI client
self._check_and_prepare_env_vars(["AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT"])
self._clients[client_key] = AsyncAzureOpenAI(api_version=api_version, azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"))
self._clients[client_key] = AsyncAzureOpenAI(api_version=api_version, azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"), **client_args)
elif client_type == AsyncAIClientType.OPEN_AI:
# Instantiate asynchronous OpenAI client
self._check_and_prepare_env_vars(["OPENAI_API_KEY"])
self._clients[client_key] = AsyncOpenAI()
self._clients[client_key] = AsyncOpenAI(**client_args)
else:
raise ValueError(f"Invalid client type: {client_type}")

return self._clients[client_key]

def _check_and_prepare_env_vars(self, env_vars: list):
"""Utility method to check for required environment variables and raise an EngineError if not found."""
for env_var in env_vars:
value = os.getenv(env_var)
if not value:
error_message = f"{env_var} is not set"
logger.warning(error_message)
raise EngineError(error_message)
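
A sketch (not part of the diff) of what the new `**client_args` pass-through enables: an Entra ID token provider can be handed to the factory and flows straight into the underlying `AzureOpenAI` constructor, so no `AZURE_OPENAI_API_KEY` is required. It assumes `openai>=1.x` and `azure-identity>=1.15` are installed.

```python
# Editor-added sketch: keyless Azure OpenAI authentication via client_args.
# DefaultAzureCredential and get_bearer_token_provider are azure-identity APIs;
# azure_ad_token_provider is a documented AzureOpenAI constructor argument.
import os

from azure.identity import DefaultAzureCredential, get_bearer_token_provider
from openai import AzureOpenAI

token_provider = get_bearer_token_provider(
    DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
)

client = AzureOpenAI(
    api_version=os.getenv("AZURE_OPENAI_API_VERSION", "2024-02-15-preview"),
    azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
    azure_ad_token_provider=token_provider,
)

# The same keyword argument can be passed through the factory above:
#   AIClientFactory.get_instance().get_client(
#       AIClientType.AZURE_OPEN_AI, azure_ad_token_provider=token_provider)
```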