From 40cf2a1a6f4e86be6d2b9467ab51f13df8a84645 Mon Sep 17 00:00:00 2001 From: Alex Thewsey Date: Fri, 30 Aug 2024 07:14:28 +0000 Subject: [PATCH] feat: Connectors for Amazon Bedrock --- README.md | 12 +- ...on-bedrock-ai21-labs-j2-mid-connector.json | 15 ++ ...-bedrock-ai21-labs-j2-ultra-connector.json | 15 ++ ...ck-ai21-labs-jamba-instruct-connector.json | 15 ++ ...mazon-titan-text-g1-express-connector.json | 15 ++ ...k-amazon-titan-text-g1-lite-connector.json | 15 ++ ...anthropic-claude-3-5-sonnet-connector.json | 15 ++ ...ck-anthropic-claude-3-haiku-connector.json | 15 ++ ...ock-anthropic-claude-3-opus-connector.json | 15 ++ ...k-anthropic-claude-3-sonnet-connector.json | 15 ++ ...azon-bedrock-cohere-command-connector.json | 15 ++ ...edrock-cohere-command-light-connector.json | 15 ++ ...on-bedrock-cohere-command-r-connector.json | 15 ++ ...eta-llama-3-1-405b-instruct-connector.json | 15 ++ ...meta-llama-3-1-70b-instruct-connector.json | 15 ++ ...-meta-llama-3-1-8b-instruct-connector.json | 15 ++ ...k-meta-llama-3-70b-instruct-connector.json | 15 ++ ...ck-meta-llama-3-8b-instruct-connector.json | 15 ++ ...tral-ai-mistral-7b-instruct-connector.json | 15 ++ ...-mistral-ai-mistral-large-2-connector.json | 15 ++ ...ck-mistral-ai-mistral-large-connector.json | 15 ++ ...al-ai-mixtral-8x7b-instruct-connector.json | 15 ++ connectors/amazon-bedrock-connector.py | 183 ++++++++++++++++++ pyproject.toml | 1 + 24 files changed, 506 insertions(+), 5 deletions(-) create mode 100644 connectors-endpoints/amazon-bedrock-ai21-labs-j2-mid-connector.json create mode 100644 connectors-endpoints/amazon-bedrock-ai21-labs-j2-ultra-connector.json create mode 100644 connectors-endpoints/amazon-bedrock-ai21-labs-jamba-instruct-connector.json create mode 100644 connectors-endpoints/amazon-bedrock-amazon-titan-text-g1-express-connector.json create mode 100644 connectors-endpoints/amazon-bedrock-amazon-titan-text-g1-lite-connector.json create mode 100644 
connectors-endpoints/amazon-bedrock-anthropic-claude-3-5-sonnet-connector.json create mode 100644 connectors-endpoints/amazon-bedrock-anthropic-claude-3-haiku-connector.json create mode 100644 connectors-endpoints/amazon-bedrock-anthropic-claude-3-opus-connector.json create mode 100644 connectors-endpoints/amazon-bedrock-anthropic-claude-3-sonnet-connector.json create mode 100644 connectors-endpoints/amazon-bedrock-cohere-command-connector.json create mode 100644 connectors-endpoints/amazon-bedrock-cohere-command-light-connector.json create mode 100644 connectors-endpoints/amazon-bedrock-cohere-command-r-connector.json create mode 100644 connectors-endpoints/amazon-bedrock-meta-llama-3-1-405b-instruct-connector.json create mode 100644 connectors-endpoints/amazon-bedrock-meta-llama-3-1-70b-instruct-connector.json create mode 100644 connectors-endpoints/amazon-bedrock-meta-llama-3-1-8b-instruct-connector.json create mode 100644 connectors-endpoints/amazon-bedrock-meta-llama-3-70b-instruct-connector.json create mode 100644 connectors-endpoints/amazon-bedrock-meta-llama-3-8b-instruct-connector.json create mode 100644 connectors-endpoints/amazon-bedrock-mistral-ai-mistral-7b-instruct-connector.json create mode 100644 connectors-endpoints/amazon-bedrock-mistral-ai-mistral-large-2-connector.json create mode 100644 connectors-endpoints/amazon-bedrock-mistral-ai-mistral-large-connector.json create mode 100644 connectors-endpoints/amazon-bedrock-mistral-ai-mixtral-8x7b-instruct-connector.json create mode 100644 connectors/amazon-bedrock-connector.py diff --git a/README.md b/README.md index 6546877..5c20583 100644 --- a/README.md +++ b/README.md @@ -123,11 +123,13 @@ Please fork the repo and create a pull request. 
You can also open an issue with ### Connectors | Connector | Description | -|---|---| -| Claude2 | For Claude2 API| -| HuggingFace | For HuggingFace Inference Endpoints | -| OpenAI | For OpenAI API | -| TogetherAI | For TogetherAI Severless API | +|---|---| +| [amazon-bedrock-connector](connectors/amazon-bedrock-connector.py) | For models consumed through [AWS' Bedrock service](https://aws.amazon.com/bedrock/) | +| [azure-openai-connector](connectors/azure-openai-connector.py) | For models consumed through [Azure OpenAI Service](https://azure.microsoft.com/en-us/products/ai-services/openai-service) | +| [claude2-connector](connectors/claude2-connector.py) | For Anthropic's [Claude 2 API](https://www.anthropic.com/api) | +| [huggingface-connector](connectors/huggingface-connector.py) | For [Hugging Face Inference Endpoints](https://huggingface.co/docs/inference-endpoints/index) | +| [openai-connector](connectors/openai-connector.py) | For the [OpenAI API](https://openai.com/api/) | +| [together-connector](connectors/together-connector.py) | For [TogetherAI Serverless API](https://www.together.ai/products#inference) |
diff --git a/connectors-endpoints/amazon-bedrock-ai21-labs-j2-mid-connector.json b/connectors-endpoints/amazon-bedrock-ai21-labs-j2-mid-connector.json new file mode 100644 index 0000000..8cf5f5e --- /dev/null +++ b/connectors-endpoints/amazon-bedrock-ai21-labs-j2-mid-connector.json @@ -0,0 +1,15 @@ +{ + "name": "Amazon Bedrock - AI21 Labs Jurassic-2 Mid", + "connector_type": "amazon-bedrock-connector", + "uri": "DEFAULT", + "token": "Use environment variables!", + "max_calls_per_second": 1, + "max_concurrency": 1, + "params": { + "timeout": 300, + "allow_retries": true, + "num_of_retries": 3, + "temperature": 0.5, + "model": "ai21.j2-mid-v1" + } +} \ No newline at end of file diff --git a/connectors-endpoints/amazon-bedrock-ai21-labs-j2-ultra-connector.json b/connectors-endpoints/amazon-bedrock-ai21-labs-j2-ultra-connector.json new file mode 100644 index 0000000..5acaeaa --- /dev/null +++ b/connectors-endpoints/amazon-bedrock-ai21-labs-j2-ultra-connector.json @@ -0,0 +1,15 @@ +{ + "name": "Amazon Bedrock - AI21 Labs Jurassic-2 Ultra", + "connector_type": "amazon-bedrock-connector", + "uri": "DEFAULT", + "token": "Use environment variables!", + "max_calls_per_second": 1, + "max_concurrency": 1, + "params": { + "timeout": 300, + "allow_retries": true, + "num_of_retries": 3, + "temperature": 0.5, + "model": "ai21.j2-ultra-v1" + } +} \ No newline at end of file diff --git a/connectors-endpoints/amazon-bedrock-ai21-labs-jamba-instruct-connector.json b/connectors-endpoints/amazon-bedrock-ai21-labs-jamba-instruct-connector.json new file mode 100644 index 0000000..8f2c495 --- /dev/null +++ b/connectors-endpoints/amazon-bedrock-ai21-labs-jamba-instruct-connector.json @@ -0,0 +1,15 @@ +{ + "name": "Amazon Bedrock - AI21 Labs Jamba Instruct", + "connector_type": "amazon-bedrock-connector", + "uri": "DEFAULT", + "token": "Use environment variables!", + "max_calls_per_second": 1, + "max_concurrency": 1, + "params": { + "timeout": 300, + "allow_retries": true, + 
"num_of_retries": 3, + "temperature": 0.5, + "model": "ai21.jamba-instruct-v1:0" + } +} \ No newline at end of file diff --git a/connectors-endpoints/amazon-bedrock-amazon-titan-text-g1-express-connector.json b/connectors-endpoints/amazon-bedrock-amazon-titan-text-g1-express-connector.json new file mode 100644 index 0000000..bf8a5f4 --- /dev/null +++ b/connectors-endpoints/amazon-bedrock-amazon-titan-text-g1-express-connector.json @@ -0,0 +1,15 @@ +{ + "name": "Amazon Bedrock - Titan Text G1 - Express", + "connector_type": "amazon-bedrock-connector", + "uri": "DEFAULT", + "token": "Use environment variables!", + "max_calls_per_second": 1, + "max_concurrency": 1, + "params": { + "timeout": 300, + "allow_retries": true, + "num_of_retries": 3, + "temperature": 0.5, + "model": "amazon.titan-text-express-v1" + } +} \ No newline at end of file diff --git a/connectors-endpoints/amazon-bedrock-amazon-titan-text-g1-lite-connector.json b/connectors-endpoints/amazon-bedrock-amazon-titan-text-g1-lite-connector.json new file mode 100644 index 0000000..d49f5b3 --- /dev/null +++ b/connectors-endpoints/amazon-bedrock-amazon-titan-text-g1-lite-connector.json @@ -0,0 +1,15 @@ +{ + "name": "Amazon Bedrock - Titan Text G1 - Lite", + "connector_type": "amazon-bedrock-connector", + "uri": "DEFAULT", + "token": "Use environment variables!", + "max_calls_per_second": 1, + "max_concurrency": 1, + "params": { + "timeout": 300, + "allow_retries": true, + "num_of_retries": 3, + "temperature": 0.5, + "model": "amazon.titan-text-lite-v1" + } +} \ No newline at end of file diff --git a/connectors-endpoints/amazon-bedrock-anthropic-claude-3-5-sonnet-connector.json b/connectors-endpoints/amazon-bedrock-anthropic-claude-3-5-sonnet-connector.json new file mode 100644 index 0000000..b32cc84 --- /dev/null +++ b/connectors-endpoints/amazon-bedrock-anthropic-claude-3-5-sonnet-connector.json @@ -0,0 +1,15 @@ +{ + "name": "Amazon Bedrock - Anthropic Claude 3.5 Sonnet", + "connector_type": 
"amazon-bedrock-connector", + "uri": "DEFAULT", + "token": "Use environment variables!", + "max_calls_per_second": 1, + "max_concurrency": 1, + "params": { + "timeout": 300, + "allow_retries": true, + "num_of_retries": 3, + "temperature": 0.5, + "model": "anthropic.claude-3-5-sonnet-20240620-v1:0" + } +} \ No newline at end of file diff --git a/connectors-endpoints/amazon-bedrock-anthropic-claude-3-haiku-connector.json b/connectors-endpoints/amazon-bedrock-anthropic-claude-3-haiku-connector.json new file mode 100644 index 0000000..15864fd --- /dev/null +++ b/connectors-endpoints/amazon-bedrock-anthropic-claude-3-haiku-connector.json @@ -0,0 +1,15 @@ +{ + "name": "Amazon Bedrock - Anthropic Claude 3 Haiku", + "connector_type": "amazon-bedrock-connector", + "uri": "DEFAULT", + "token": "Use environment variables!", + "max_calls_per_second": 1, + "max_concurrency": 1, + "params": { + "timeout": 300, + "allow_retries": true, + "num_of_retries": 3, + "temperature": 0.5, + "model": "anthropic.claude-3-haiku-20240307-v1:0" + } +} \ No newline at end of file diff --git a/connectors-endpoints/amazon-bedrock-anthropic-claude-3-opus-connector.json b/connectors-endpoints/amazon-bedrock-anthropic-claude-3-opus-connector.json new file mode 100644 index 0000000..306366e --- /dev/null +++ b/connectors-endpoints/amazon-bedrock-anthropic-claude-3-opus-connector.json @@ -0,0 +1,15 @@ +{ + "name": "Amazon Bedrock - Anthropic Claude 3 Opus", + "connector_type": "amazon-bedrock-connector", + "uri": "DEFAULT", + "token": "Use environment variables!", + "max_calls_per_second": 1, + "max_concurrency": 1, + "params": { + "timeout": 300, + "allow_retries": true, + "num_of_retries": 3, + "temperature": 0.5, + "model": "anthropic.claude-3-opus-20240229-v1:0" + } +} \ No newline at end of file diff --git a/connectors-endpoints/amazon-bedrock-anthropic-claude-3-sonnet-connector.json b/connectors-endpoints/amazon-bedrock-anthropic-claude-3-sonnet-connector.json new file mode 100644 index 
0000000..d2e8770 --- /dev/null +++ b/connectors-endpoints/amazon-bedrock-anthropic-claude-3-sonnet-connector.json @@ -0,0 +1,15 @@ +{ + "name": "Amazon Bedrock - Anthropic Claude 3 Sonnet", + "connector_type": "amazon-bedrock-connector", + "uri": "DEFAULT", + "token": "Use environment variables!", + "max_calls_per_second": 1, + "max_concurrency": 1, + "params": { + "timeout": 300, + "allow_retries": true, + "num_of_retries": 3, + "temperature": 0.5, + "model": "anthropic.claude-3-sonnet-20240229-v1:0" + } +} \ No newline at end of file diff --git a/connectors-endpoints/amazon-bedrock-cohere-command-connector.json b/connectors-endpoints/amazon-bedrock-cohere-command-connector.json new file mode 100644 index 0000000..461f6ce --- /dev/null +++ b/connectors-endpoints/amazon-bedrock-cohere-command-connector.json @@ -0,0 +1,15 @@ +{ + "name": "Amazon Bedrock - Cohere Command", + "connector_type": "amazon-bedrock-connector", + "uri": "DEFAULT", + "token": "Use environment variables!", + "max_calls_per_second": 1, + "max_concurrency": 1, + "params": { + "timeout": 300, + "allow_retries": true, + "num_of_retries": 3, + "temperature": 0.5, + "model": "cohere.command-text-v14" + } +} \ No newline at end of file diff --git a/connectors-endpoints/amazon-bedrock-cohere-command-light-connector.json b/connectors-endpoints/amazon-bedrock-cohere-command-light-connector.json new file mode 100644 index 0000000..60df565 --- /dev/null +++ b/connectors-endpoints/amazon-bedrock-cohere-command-light-connector.json @@ -0,0 +1,15 @@ +{ + "name": "Amazon Bedrock - Cohere Command Light", + "connector_type": "amazon-bedrock-connector", + "uri": "DEFAULT", + "token": "Use environment variables!", + "max_calls_per_second": 1, + "max_concurrency": 1, + "params": { + "timeout": 300, + "allow_retries": true, + "num_of_retries": 3, + "temperature": 0.5, + "model": "cohere.command-light-text-v14" + } +} \ No newline at end of file diff --git 
a/connectors-endpoints/amazon-bedrock-cohere-command-r-connector.json b/connectors-endpoints/amazon-bedrock-cohere-command-r-connector.json new file mode 100644 index 0000000..b6240b6 --- /dev/null +++ b/connectors-endpoints/amazon-bedrock-cohere-command-r-connector.json @@ -0,0 +1,15 @@ +{ + "name": "Amazon Bedrock - Cohere Command R+", + "connector_type": "amazon-bedrock-connector", + "uri": "DEFAULT", + "token": "Use environment variables!", + "max_calls_per_second": 1, + "max_concurrency": 1, + "params": { + "timeout": 300, + "allow_retries": true, + "num_of_retries": 3, + "temperature": 0.5, + "model": "cohere.command-r-plus-v1:0" + } +} \ No newline at end of file diff --git a/connectors-endpoints/amazon-bedrock-meta-llama-3-1-405b-instruct-connector.json b/connectors-endpoints/amazon-bedrock-meta-llama-3-1-405b-instruct-connector.json new file mode 100644 index 0000000..aaa40c1 --- /dev/null +++ b/connectors-endpoints/amazon-bedrock-meta-llama-3-1-405b-instruct-connector.json @@ -0,0 +1,15 @@ +{ + "name": "Amazon Bedrock - Llama 3.1 405B Instruct", + "connector_type": "amazon-bedrock-connector", + "uri": "DEFAULT", + "token": "Use environment variables!", + "max_calls_per_second": 1, + "max_concurrency": 1, + "params": { + "timeout": 300, + "allow_retries": true, + "num_of_retries": 3, + "temperature": 0.5, + "model": "meta.llama3-1-405b-instruct-v1:0" + } +} \ No newline at end of file diff --git a/connectors-endpoints/amazon-bedrock-meta-llama-3-1-70b-instruct-connector.json b/connectors-endpoints/amazon-bedrock-meta-llama-3-1-70b-instruct-connector.json new file mode 100644 index 0000000..4e2f43b --- /dev/null +++ b/connectors-endpoints/amazon-bedrock-meta-llama-3-1-70b-instruct-connector.json @@ -0,0 +1,15 @@ +{ + "name": "Amazon Bedrock - Llama 3.1 70B Instruct", + "connector_type": "amazon-bedrock-connector", + "uri": "DEFAULT", + "token": "Use environment variables!", + "max_calls_per_second": 1, + "max_concurrency": 1, + "params": { + "timeout": 300, 
+ "allow_retries": true, + "num_of_retries": 3, + "temperature": 0.5, + "model": "meta.llama3-1-70b-instruct-v1:0" + } +} \ No newline at end of file diff --git a/connectors-endpoints/amazon-bedrock-meta-llama-3-1-8b-instruct-connector.json b/connectors-endpoints/amazon-bedrock-meta-llama-3-1-8b-instruct-connector.json new file mode 100644 index 0000000..7ce0afc --- /dev/null +++ b/connectors-endpoints/amazon-bedrock-meta-llama-3-1-8b-instruct-connector.json @@ -0,0 +1,15 @@ +{ + "name": "Amazon Bedrock - Llama 3.1 8B Instruct", + "connector_type": "amazon-bedrock-connector", + "uri": "DEFAULT", + "token": "Use environment variables!", + "max_calls_per_second": 1, + "max_concurrency": 1, + "params": { + "timeout": 300, + "allow_retries": true, + "num_of_retries": 3, + "temperature": 0.5, + "model": "meta.llama3-1-8b-instruct-v1:0" + } +} \ No newline at end of file diff --git a/connectors-endpoints/amazon-bedrock-meta-llama-3-70b-instruct-connector.json b/connectors-endpoints/amazon-bedrock-meta-llama-3-70b-instruct-connector.json new file mode 100644 index 0000000..5f31562 --- /dev/null +++ b/connectors-endpoints/amazon-bedrock-meta-llama-3-70b-instruct-connector.json @@ -0,0 +1,15 @@ +{ + "name": "Amazon Bedrock - Llama 3 70B Instruct", + "connector_type": "amazon-bedrock-connector", + "uri": "DEFAULT", + "token": "Use environment variables!", + "max_calls_per_second": 1, + "max_concurrency": 1, + "params": { + "timeout": 300, + "allow_retries": true, + "num_of_retries": 3, + "temperature": 0.5, + "model": "meta.llama3-70b-instruct-v1:0" + } +} \ No newline at end of file diff --git a/connectors-endpoints/amazon-bedrock-meta-llama-3-8b-instruct-connector.json b/connectors-endpoints/amazon-bedrock-meta-llama-3-8b-instruct-connector.json new file mode 100644 index 0000000..4b8e1a6 --- /dev/null +++ b/connectors-endpoints/amazon-bedrock-meta-llama-3-8b-instruct-connector.json @@ -0,0 +1,15 @@ +{ + "name": "Amazon Bedrock - Llama 3 8B Instruct", + "connector_type": 
"amazon-bedrock-connector", + "uri": "DEFAULT", + "token": "Use environment variables!", + "max_calls_per_second": 1, + "max_concurrency": 1, + "params": { + "timeout": 300, + "allow_retries": true, + "num_of_retries": 3, + "temperature": 0.5, + "model": "meta.llama3-8b-instruct-v1:0" + } +} \ No newline at end of file diff --git a/connectors-endpoints/amazon-bedrock-mistral-ai-mistral-7b-instruct-connector.json b/connectors-endpoints/amazon-bedrock-mistral-ai-mistral-7b-instruct-connector.json new file mode 100644 index 0000000..49e3271 --- /dev/null +++ b/connectors-endpoints/amazon-bedrock-mistral-ai-mistral-7b-instruct-connector.json @@ -0,0 +1,15 @@ +{ + "name": "Amazon Bedrock - Mistral 7B Instruct", + "connector_type": "amazon-bedrock-connector", + "uri": "DEFAULT", + "token": "Use environment variables!", + "max_calls_per_second": 1, + "max_concurrency": 1, + "params": { + "timeout": 300, + "allow_retries": true, + "num_of_retries": 3, + "temperature": 0.5, + "model": "mistral.mistral-7b-instruct-v0:2" + } +} \ No newline at end of file diff --git a/connectors-endpoints/amazon-bedrock-mistral-ai-mistral-large-2-connector.json b/connectors-endpoints/amazon-bedrock-mistral-ai-mistral-large-2-connector.json new file mode 100644 index 0000000..2fd49c0 --- /dev/null +++ b/connectors-endpoints/amazon-bedrock-mistral-ai-mistral-large-2-connector.json @@ -0,0 +1,15 @@ +{ + "name": "Amazon Bedrock - Mistral Large 2", + "connector_type": "amazon-bedrock-connector", + "uri": "DEFAULT", + "token": "Use environment variables!", + "max_calls_per_second": 1, + "max_concurrency": 1, + "params": { + "timeout": 300, + "allow_retries": true, + "num_of_retries": 3, + "temperature": 0.5, + "model": "mistral.mistral-large-2407-v1:0" + } +} \ No newline at end of file diff --git a/connectors-endpoints/amazon-bedrock-mistral-ai-mistral-large-connector.json b/connectors-endpoints/amazon-bedrock-mistral-ai-mistral-large-connector.json new file mode 100644 index 0000000..3f79d29 --- 
/dev/null +++ b/connectors-endpoints/amazon-bedrock-mistral-ai-mistral-large-connector.json @@ -0,0 +1,15 @@ +{ + "name": "Amazon Bedrock - Mistral Large", + "connector_type": "amazon-bedrock-connector", + "uri": "DEFAULT", + "token": "Use environment variables!", + "max_calls_per_second": 1, + "max_concurrency": 1, + "params": { + "timeout": 300, + "allow_retries": true, + "num_of_retries": 3, + "temperature": 0.5, + "model": "mistral.mistral-large-2402-v1:0" + } +} \ No newline at end of file diff --git a/connectors-endpoints/amazon-bedrock-mistral-ai-mixtral-8x7b-instruct-connector.json b/connectors-endpoints/amazon-bedrock-mistral-ai-mixtral-8x7b-instruct-connector.json new file mode 100644 index 0000000..80f2864 --- /dev/null +++ b/connectors-endpoints/amazon-bedrock-mistral-ai-mixtral-8x7b-instruct-connector.json @@ -0,0 +1,15 @@ +{ + "name": "Amazon Bedrock - Mixtral 8x7B Instruct", + "connector_type": "amazon-bedrock-connector", + "uri": "DEFAULT", + "token": "Use environment variables!", + "max_calls_per_second": 1, + "max_concurrency": 1, + "params": { + "timeout": 300, + "allow_retries": true, + "num_of_retries": 3, + "temperature": 0.5, + "model": "mistral.mixtral-8x7b-instruct-v0:1" + } +} \ No newline at end of file diff --git a/connectors/amazon-bedrock-connector.py b/connectors/amazon-bedrock-connector.py new file mode 100644 index 0000000..f949418 --- /dev/null +++ b/connectors/amazon-bedrock-connector.py @@ -0,0 +1,183 @@ +import logging + +import asyncio +import boto3 +from botocore.config import Config + +from moonshot.src.connectors.connector import Connector, perform_retry +from moonshot.src.connectors_endpoints.connector_endpoint_arguments import ( + ConnectorEndpointArguments, +) + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +class AmazonBedrockConnector(Connector): + """Amazon Bedrock connector for AI Verify Moonshot + + Although you *could* provide a `token` in the configuration for this connector 
(which would be + treated as an AWS Session Token), it's recommended to leave this field as a short placeholder + e.g. 'NONE' and instead configure your AWS credentials via the environment - as standard for + boto3 and AWS CLI. Any `token` under 30 characters will be ignored as a placeholder. + For info see: https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html + + In normal usage, you can also leave `uri` as a short placeholder value (like "DEFAULT") because + this will be automatically inferred based on your AWS environment setup. If you provide a `uri` + of 8 characters or more, it'll be treated like specifying a boto3 `client.endpoint_url`. + + You can override boto3 Session arguments (like `region_name`, `profile_name`) by adding a + `session` dictionary in the connector's `params`. For available options, see: + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html + + Likewise you can override boto3 Client arguments (like `endpoint_url`) by adding them to a + `client` dictionary. Within this you can provide `config` options as shown below. See: + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html#boto3.session.Session.client + https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html + + Examples + -------- + + The following configuration JSON example shows customising AWS/boto3 session, client, + client-config, inference guardrail, and other inference configuration parameters: + + >>> { + ... "name": "Amazon Bedrock Claude 3", + ... "connector_type": "amazon-bedrock-connector", + ... "uri": "DEFAULT", + ... "token": "NONE", + ... "max_calls_per_second": 1, + ... "max_concurrency": 1, + ... "params": { + ... "timeout": 300, + ... "allow_retries": true, + ... "num_of_retries": 3, + ... "temperature": 0.5, + ... "model": "anthropic.claude-3-sonnet-20240229-v1:0", + ... "client": { + ... "endpoint_url": "https://...", + ... "config": { + ... 
"connect_timeout": 10, + ... "read_timeout": 60, + ... } + ... }, + ... "session": { + ... "region_name": "us-west-2", + ... }, + ... "guardrailConfig": { + ... "guardrailIdentifier": "...", + ... "guardrailVersion": "DRAFT", + ... "trace": "enabled" + ... }, + ... "inferenceConfig": { + ... "topP": 0.9 + ... } + ... } + ... } + """ + + def __init__(self, ep_arguments: ConnectorEndpointArguments): + # Initialize super class + super().__init__(ep_arguments) + + # Initialise AWS session: + session_kwargs = self.optional_params.get("session", {}) + # Provide an option to set AWS access token via moonshot standard, but ignore placeholders + # like 'NONE' since moonshot currently makes this field mandatory: + if self.token: + if len(self.token) < 30: + logger.info( + "Ignoring `token` with %s characters (doesn't look like an aws_session_token)", + len(self.token), + ) + else: + session_kwargs["aws_session_token"] = self.token + self._session = boto3.Session(**session_kwargs) + + # Optional advanced configurations for AWS service client: + client_kwargs = self.optional_params.get("client", {}) + if "config" in client_kwargs: + # Convert from JSON configuration dictionary to boto3 Python class: + client_kwargs["config"] = Config(**client_kwargs["config"]) + # Provide an option to set endpoint_url via moonshot standard, but ignore placeholders + # like 'DEFAULT' since moonshot currently makes this field mandatory: + if self.endpoint: + if len(self.endpoint) < 8: + logger.info( + "Ignoring placeholder `endpoint` (doesn't look like an AWS endpoint). 
Got: %s", + self.endpoint, + ) + elif "endpoint_url" in client_kwargs: + logger.info( + "Configured `client.endpoint_url` %s override configured `endpoint` %s", + client_kwargs["endpoint_url"], + self.endpoint, + ) + else: + client_kwargs["endpoint_url"] = self.endpoint + self._client = self._session.client("bedrock-runtime", **client_kwargs) + + # Set the model to use and remove it from optional_params if it exists + if "model" not in self.optional_params: + raise ValueError( + "`params.model` must be set to an enabled Model ID to use the Amazon Bedrock " + "connector. For more information, see: " + "https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html" + ) + self.model = self.optional_params["model"] + + @Connector.rate_limited + @perform_retry + async def get_response(self, prompt: str) -> str: + """Asynchronously send a prompt to the Amazon Bedrock API and return the generated response + + This method uses the Bedrock Converse API, which provides more cross-model standardization + than the basic InvokeModel API: + https://docs.aws.amazon.com/bedrock/latest/userguide/conversation-inference.html + https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_Converse.html + + Your `prompt` (plus this connector's configured `pre_prompt` prefix and `post_prompt` + suffix) will be sent to the model as a single user message. If a `system_message` has been + configured in the connector params, it will be passed as a `system` message in the Converse + API. Likewise `guardrailConfig` and `inferenceConfig` can be specified in connector params + and will be passed through. + + In the unexpected event that the model returns anything other than a single text message + (e.g. multiple messages or multi-media responses), this function will ignore any non-text + content and concatenate multiple text messages with double-newlines. + + Args: + prompt (str): The input prompt to send to the model. 
+ + Returns: + str: The text response generated by the selected model. + """ + connector_prompt = f"{self.pre_prompt}{prompt}{self.post_prompt}" + req_params = { + "modelId": self.model, + "messages": [ + {"role": "user", "content": [{"text": connector_prompt}]}, + ], + } + for key in ["inferenceConfig", "guardrailConfig"]: + if key in self.optional_params: + req_params[key] = self.optional_params[key] + + # aioboto3 requires clients to be used as async context managers (so would either need to + # recreate the client for every request or otherwise hack around to work in Moonshot's API) + # - so we'll use the official boto3 SDK (synchronous) client and just wrap it with asyncio: + response = await asyncio.to_thread(lambda: self._client.converse(**req_params)) + message = response["output"]["message"] + if ( + (not message) + or message["role"] != "assistant" + or len(message["content"]) < 1 + ): + raise ValueError( + "Bedrock response did not include an assistant message with content. " + f"Got: {message}" + ) + # Ignore any non-text contents, and join together with '\n\n' if multiple are returned: + return "\n\n".join( + map(lambda m: m["text"], filter(lambda m: "text" in m, message["content"])) + ) diff --git a/pyproject.toml b/pyproject.toml index dd03c53..24831c5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,6 +24,7 @@ readme = "README.md" [tool.poetry.dependencies] python = ">=3.11,<3.12" +boto3 = "^1.34.153" openai = "1.38.0" bert-score = "0.3.13" nltk = "3.8.1"