From b0fa2484ce35904287e421536b65e709560334eb Mon Sep 17 00:00:00 2001
From: Julien Dubois
Date: Wed, 3 Jul 2024 16:32:47 +0200
Subject: [PATCH] Bump com.azure:azure-ai-openai from 1.0.0-beta.8 to
 1.0.0-beta.10 (#1401)

- Bump com.azure:azure-ai-openai from 1.0.0-beta.8 to 1.0.0-beta.10
- Migrate to Sweden Central
- Fix several tests using the same approach as https://github.com/langchain4j/langchain4j/pull/1022
---
 .../azure/AzureOpenAIResponsibleAIIT.java     |  6 +-
 .../azure/AzureOpenAiEmbeddingModelIT.java    |  4 +-
 .../model/azure/AzureOpenAiImageModelIT.java  |  7 +-
 .../azure/AzureOpenAiLanguageModelIT.java     |  4 +-
 .../AzureOpenAiStreamingChatModelIT.java      | 20 ++---
 .../AzureOpenAiStreamingLanguageModelIT.java  |  2 +-
 .../test/script/deploy-azure-openai-models.sh | 76 +------------------
 langchain4j-parent/pom.xml                    |  2 +-
 8 files changed, 26 insertions(+), 95 deletions(-)

diff --git a/langchain4j-azure-open-ai/src/test/java/dev/langchain4j/model/azure/AzureOpenAIResponsibleAIIT.java b/langchain4j-azure-open-ai/src/test/java/dev/langchain4j/model/azure/AzureOpenAIResponsibleAIIT.java
index 55099e824b6..252befdb510 100644
--- a/langchain4j-azure-open-ai/src/test/java/dev/langchain4j/model/azure/AzureOpenAIResponsibleAIIT.java
+++ b/langchain4j-azure-open-ai/src/test/java/dev/langchain4j/model/azure/AzureOpenAIResponsibleAIIT.java
@@ -80,7 +80,7 @@ void image_should_trigger_content_filter_for_sexual_content() {
         AzureOpenAiImageModel model = AzureOpenAiImageModel.builder()
                 .endpoint(System.getenv("AZURE_OPENAI_ENDPOINT"))
                 .apiKey(System.getenv("AZURE_OPENAI_KEY"))
-                .deploymentName("dall-e-3")
+                .deploymentName("dall-e-3-30")
                 .logRequestsAndResponses(true)
                 .build();
 
@@ -98,7 +98,7 @@ void language_model_should_trigger_content_filter_for_violence() {
         LanguageModel model = AzureOpenAiLanguageModel.builder()
                 .endpoint(System.getenv("AZURE_OPENAI_ENDPOINT"))
                 .apiKey(System.getenv("AZURE_OPENAI_KEY"))
-                .deploymentName("gpt-35-turbo-instruct")
+                .deploymentName("gpt-35-turbo-instruct-0914")
                 .tokenizer(new AzureOpenAiTokenizer(GPT_3_5_TURBO_INSTRUCT))
                 .temperature(0.0)
                 .maxTokens(20)
@@ -170,7 +170,7 @@ void streaming_language_should_trigger_content_filter_for_violence(String deploy
         StreamingLanguageModel model = AzureOpenAiStreamingLanguageModel.builder()
                 .endpoint(System.getenv("AZURE_OPENAI_ENDPOINT"))
                 .apiKey(System.getenv("AZURE_OPENAI_KEY"))
-                .deploymentName("gpt-35-turbo-instruct")
+                .deploymentName("gpt-35-turbo-instruct-0914")
                 .tokenizer(new AzureOpenAiTokenizer(GPT_3_5_TURBO_INSTRUCT))
                 .temperature(0.0)
                 .maxTokens(20)
diff --git a/langchain4j-azure-open-ai/src/test/java/dev/langchain4j/model/azure/AzureOpenAiEmbeddingModelIT.java b/langchain4j-azure-open-ai/src/test/java/dev/langchain4j/model/azure/AzureOpenAiEmbeddingModelIT.java
index c475c5208fe..01496a1db7b 100644
--- a/langchain4j-azure-open-ai/src/test/java/dev/langchain4j/model/azure/AzureOpenAiEmbeddingModelIT.java
+++ b/langchain4j-azure-open-ai/src/test/java/dev/langchain4j/model/azure/AzureOpenAiEmbeddingModelIT.java
@@ -25,7 +25,7 @@ class AzureOpenAiEmbeddingModelIT {
     EmbeddingModel model = AzureOpenAiEmbeddingModel.builder()
             .endpoint(System.getenv("AZURE_OPENAI_ENDPOINT"))
             .apiKey(System.getenv("AZURE_OPENAI_KEY"))
-            .deploymentName("text-embedding-ada-002")
+            .deploymentName("text-embedding-ada-002-2")
             .tokenizer(new AzureOpenAiTokenizer(TEXT_EMBEDDING_ADA_002))
             .logRequestsAndResponses(true)
             .build();
@@ -73,7 +73,7 @@ void should_embed_in_batches() {
 
     @ParameterizedTest(name = "Testing model {0}")
     @EnumSource(value = AzureOpenAiEmbeddingModelName.class,
-            mode = EXCLUDE, names = "TEXT_EMBEDDING_ADA_002_2")
+            mode = EXCLUDE, names = {"TEXT_EMBEDDING_ADA_002_2", "TEXT_EMBEDDING_3_SMALL", "TEXT_EMBEDDING_3_SMALL_1", "TEXT_EMBEDDING_3_LARGE", "TEXT_EMBEDDING_ADA_002", "TEXT_EMBEDDING_ADA_002_1"})
     void should_support_all_string_model_names(AzureOpenAiEmbeddingModelName modelName) {
 
         // given
diff --git a/langchain4j-azure-open-ai/src/test/java/dev/langchain4j/model/azure/AzureOpenAiImageModelIT.java b/langchain4j-azure-open-ai/src/test/java/dev/langchain4j/model/azure/AzureOpenAiImageModelIT.java
index 4b30d0f9ab8..b6bd2675f5d 100644
--- a/langchain4j-azure-open-ai/src/test/java/dev/langchain4j/model/azure/AzureOpenAiImageModelIT.java
+++ b/langchain4j-azure-open-ai/src/test/java/dev/langchain4j/model/azure/AzureOpenAiImageModelIT.java
@@ -15,6 +15,7 @@
 import java.util.Base64;
 
 import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.jupiter.params.provider.EnumSource.Mode.EXCLUDE;
 
 public class AzureOpenAiImageModelIT {
 
@@ -26,7 +27,7 @@ void should_generate_image_with_url() {
         AzureOpenAiImageModel model = AzureOpenAiImageModel.builder()
                 .endpoint(System.getenv("AZURE_OPENAI_ENDPOINT"))
                 .apiKey(System.getenv("AZURE_OPENAI_KEY"))
-                .deploymentName("dall-e-3")
+                .deploymentName("dall-e-3-30")
                 .logRequestsAndResponses(true)
                 .build();
 
@@ -50,7 +51,7 @@ void should_generate_image_in_base64() throws IOException {
         AzureOpenAiImageModel model = AzureOpenAiImageModel.builder()
                 .endpoint(System.getenv("AZURE_OPENAI_ENDPOINT"))
                 .apiKey(System.getenv("AZURE_OPENAI_KEY"))
-                .deploymentName("dall-e-3")
+                .deploymentName("dall-e-3-30")
                 .logRequestsAndResponses(false) // The image is big, so we don't want to log it by default
                 .responseFormat(ImageGenerationResponseFormat.BASE64.toString())
                 .build();
@@ -75,7 +76,7 @@ void should_generate_image_in_base64() throws IOException {
     }
 
     @ParameterizedTest(name = "Testing model {0}")
-    @EnumSource(AzureOpenAiImageModelName.class)
+    @EnumSource(value = AzureOpenAiImageModelName.class, mode = EXCLUDE, names = "DALL_E_3")
     void should_support_all_string_model_names(AzureOpenAiImageModelName modelName) {
 
         // given
diff --git a/langchain4j-azure-open-ai/src/test/java/dev/langchain4j/model/azure/AzureOpenAiLanguageModelIT.java b/langchain4j-azure-open-ai/src/test/java/dev/langchain4j/model/azure/AzureOpenAiLanguageModelIT.java
index fd56d01c395..162b979672f 100644
--- a/langchain4j-azure-open-ai/src/test/java/dev/langchain4j/model/azure/AzureOpenAiLanguageModelIT.java
+++ b/langchain4j-azure-open-ai/src/test/java/dev/langchain4j/model/azure/AzureOpenAiLanguageModelIT.java
@@ -22,7 +22,7 @@ class AzureOpenAiLanguageModelIT {
     LanguageModel model = AzureOpenAiLanguageModel.builder()
             .endpoint(System.getenv("AZURE_OPENAI_ENDPOINT"))
            .apiKey(System.getenv("AZURE_OPENAI_KEY"))
-            .deploymentName("gpt-35-turbo-instruct")
+            .deploymentName("gpt-35-turbo-instruct-0914")
             .tokenizer(new AzureOpenAiTokenizer(GPT_3_5_TURBO_INSTRUCT))
             .temperature(0.0)
             .maxTokens(20)
@@ -59,7 +59,7 @@ void should_generate_answer_and_finish_reason_length() {
 
     @ParameterizedTest(name = "Testing model {0}")
     @EnumSource(value = AzureOpenAiLanguageModelName.class,
-            mode = EXCLUDE, names = {"TEXT_DAVINCI_002", "TEXT_DAVINCI_002_1"})
+            mode = EXCLUDE, names = {"GPT_3_5_TURBO_INSTRUCT", "TEXT_DAVINCI_002", "TEXT_DAVINCI_002_1"})
     void should_support_all_string_model_names(AzureOpenAiLanguageModelName modelName) {
 
         // given
diff --git a/langchain4j-azure-open-ai/src/test/java/dev/langchain4j/model/azure/AzureOpenAiStreamingChatModelIT.java b/langchain4j-azure-open-ai/src/test/java/dev/langchain4j/model/azure/AzureOpenAiStreamingChatModelIT.java
index 907c0caa85a..68574793fcb 100644
--- a/langchain4j-azure-open-ai/src/test/java/dev/langchain4j/model/azure/AzureOpenAiStreamingChatModelIT.java
+++ b/langchain4j-azure-open-ai/src/test/java/dev/langchain4j/model/azure/AzureOpenAiStreamingChatModelIT.java
@@ -47,6 +47,8 @@ class AzureOpenAiStreamingChatModelIT {
 
     Percentage tokenizerPrecision = withPercentage(5);
 
+    public long STREAMING_TIMEOUT = 120;
+
     @ParameterizedTest(name = "Deployment name {0} using {1} with async client set to {2}")
     @CsvSource({
             "gpt-4o, gpt-4o, true",
@@ -90,8 +92,8 @@ public void onError(Throwable error) {
             }
         });
 
-        String answer = futureAnswer.get(30, SECONDS);
-        Response<AiMessage> response = futureResponse.get(30, SECONDS);
+        String answer = futureAnswer.get(STREAMING_TIMEOUT, SECONDS);
+        Response<AiMessage> response = futureResponse.get(STREAMING_TIMEOUT, SECONDS);
 
         assertThat(answer).contains("Paris");
         assertThat(response.content().text()).isEqualTo(answer);
@@ -154,8 +156,8 @@ public void onError(Throwable error) {
             }
         });
 
-        String answer = futureAnswer.get(30, SECONDS);
-        Response<AiMessage> response = futureResponse.get(30, SECONDS);
+        String answer = futureAnswer.get(STREAMING_TIMEOUT, SECONDS);
+        Response<AiMessage> response = futureResponse.get(STREAMING_TIMEOUT, SECONDS);
 
         assertThat(answer).contains("Paris");
         assertThat(response.content().text()).isEqualTo(answer);
@@ -241,7 +243,7 @@ public void onError(Throwable error) {
             }
         });
 
-        Response<AiMessage> response = futureResponse.get(30, SECONDS);
+        Response<AiMessage> response = futureResponse.get(STREAMING_TIMEOUT, SECONDS);
         AiMessage aiMessage = response.content();
 
         assertThat(aiMessage.text()).isNull();
@@ -282,7 +284,7 @@ public void onError(Throwable error) {
             }
         });
 
-        Response<AiMessage> response2 = futureResponse2.get(30, SECONDS);
+        Response<AiMessage> response2 = futureResponse2.get(STREAMING_TIMEOUT, SECONDS);
         AiMessage aiMessage2 = response2.content();
 
         // then
@@ -356,7 +358,7 @@ public void onError(Throwable error) {
             }
         });
 
-        Response<AiMessage> response = futureResponse.get(30, SECONDS);
+        Response<AiMessage> response = futureResponse.get(STREAMING_TIMEOUT, SECONDS);
         AiMessage aiMessage = response.content();
 
         assertThat(aiMessage.text()).isNull();
@@ -402,7 +404,7 @@ public void onError(Throwable error) {
             }
         });
 
-        Response<AiMessage> response2 = futureResponse2.get(30, SECONDS);
+        Response<AiMessage> response2 = futureResponse2.get(STREAMING_TIMEOUT, SECONDS);
         AiMessage aiMessage2 = response2.content();
 
         // then
@@ -560,7 +562,7 @@ public void onComplete(Response<AiMessage> response) {
 
         // when
         model.generate(userMessage, handler);
-        String content = future.get(5, SECONDS);
+        String content = future.get(STREAMING_TIMEOUT, SECONDS);
 
         // then
         assertThat(content).contains("Access denied due to invalid subscription key or wrong API endpoint");
diff --git a/langchain4j-azure-open-ai/src/test/java/dev/langchain4j/model/azure/AzureOpenAiStreamingLanguageModelIT.java b/langchain4j-azure-open-ai/src/test/java/dev/langchain4j/model/azure/AzureOpenAiStreamingLanguageModelIT.java
index 910fecee117..4b1ff911774 100644
--- a/langchain4j-azure-open-ai/src/test/java/dev/langchain4j/model/azure/AzureOpenAiStreamingLanguageModelIT.java
+++ b/langchain4j-azure-open-ai/src/test/java/dev/langchain4j/model/azure/AzureOpenAiStreamingLanguageModelIT.java
@@ -22,7 +22,7 @@ class AzureOpenAiStreamingLanguageModelIT {
     StreamingLanguageModel model = AzureOpenAiStreamingLanguageModel.builder()
             .endpoint(System.getenv("AZURE_OPENAI_ENDPOINT"))
             .apiKey(System.getenv("AZURE_OPENAI_KEY"))
-            .deploymentName("gpt-35-turbo-instruct")
+            .deploymentName("gpt-35-turbo-instruct-0914")
             .tokenizer(new AzureOpenAiTokenizer(GPT_3_5_TURBO_INSTRUCT))
             .temperature(0.0)
             .maxTokens(20)
diff --git a/langchain4j-azure-open-ai/src/test/script/deploy-azure-openai-models.sh b/langchain4j-azure-open-ai/src/test/script/deploy-azure-openai-models.sh
index 089db66e258..13369f1ee71 100755
--- a/langchain4j-azure-open-ai/src/test/script/deploy-azure-openai-models.sh
+++ b/langchain4j-azure-open-ai/src/test/script/deploy-azure-openai-models.sh
@@ -16,9 +16,9 @@ echo "Setting up environment variables..."
 echo "----------------------------------"
 
-PROJECT="langchain4j-eastus"
+PROJECT="langchain4j-swedencentral"
 RESOURCE_GROUP="rg-$PROJECT"
-LOCATION="eastus"
+LOCATION="swedencentral"
 AI_SERVICE="ai-$PROJECT"
 TAG="$PROJECT"
@@ -50,18 +50,6 @@ az cognitiveservices account create \
 echo "Deploying Chat Models"
 echo "====================="
 
-echo "Deploying a gpt-35-turbo-0301 model..."
-echo "----------------------"
-az cognitiveservices account deployment create \
-    --name "$AI_SERVICE" \
-    --resource-group "$RESOURCE_GROUP" \
-    --deployment-name "gpt-35-turbo-0301" \
-    --model-name "gpt-35-turbo" \
-    --model-version "0125" \
-    --model-format "OpenAI" \
-    --sku-capacity 1 \
-    --sku-name "Standard"
-
 echo "Deploying a gpt-35-turbo-0613 model..."
 echo "----------------------"
 az cognitiveservices account deployment create \
@@ -110,30 +98,6 @@ az cognitiveservices account deployment create \
     --sku-capacity 1 \
     --sku-name "Standard"
 
-echo "Deploying a gpt-4-0125-preview model..."
-echo "----------------------"
-az cognitiveservices account deployment create \
-    --name "$AI_SERVICE" \
-    --resource-group "$RESOURCE_GROUP" \
-    --deployment-name "gpt-4-0125-preview" \
-    --model-name "gpt-4" \
-    --model-version "0125-preview" \
-    --model-format "OpenAI" \
-    --sku-capacity 1 \
-    --sku-name "Standard"
-
-echo "Deploying a gpt-4-1106-preview model..."
-echo "----------------------"
-az cognitiveservices account deployment create \
-    --name "$AI_SERVICE" \
-    --resource-group "$RESOURCE_GROUP" \
-    --deployment-name "gpt-4-1106-preview" \
-    --model-name "gpt-4" \
-    --model-version "1106-preview" \
-    --model-format "OpenAI" \
-    --sku-capacity 1 \
-    --sku-name "Standard"
-
 echo "Deploying a gpt-4-turbo-2024-04-09 model..."
 echo "----------------------"
 az cognitiveservices account deployment create \
@@ -186,18 +150,6 @@ az cognitiveservices account deployment create \
 echo "Deploying Embedding Models"
 echo "=========================="
 
-echo "Deploying a text-embedding-ada-002-1 model..."
-echo "----------------------"
-az cognitiveservices account deployment create \
-    --name "$AI_SERVICE" \
-    --resource-group "$RESOURCE_GROUP" \
-    --deployment-name "text-embedding-ada-002-1" \
-    --model-name "text-embedding-ada-002" \
-    --model-version "1" \
-    --model-format "OpenAI" \
-    --sku-capacity 1 \
-    --sku-name "Standard"
-
 echo "Deploying a text-embedding-ada-002-2 model..."
 echo "----------------------"
 az cognitiveservices account deployment create \
@@ -210,18 +162,6 @@ az cognitiveservices account deployment create \
     --sku-capacity 1 \
     --sku-name "Standard"
 
-echo "Deploying a text-embedding-3-small-1 model..."
-echo "----------------------" -az cognitiveservices account deployment create \ - --name "$AI_SERVICE" \ - --resource-group "$RESOURCE_GROUP" \ - --deployment-name "text-embedding-3-small-1" \ - --model-name "text-embedding-3-small" \ - --model-version "1" \ - --model-format "OpenAI" \ - --sku-capacity 1 \ - --sku-name "Standard" - echo "Deploying a text-embedding-3-large-1 model..." echo "----------------------" az cognitiveservices account deployment create \ @@ -238,18 +178,6 @@ az cognitiveservices account deployment create \ echo "Deploying Image Models" echo "======================" -echo "Deploying a dall-e-3 model..." -echo "----------------------" -az cognitiveservices account deployment create \ - --name "$AI_SERVICE" \ - --resource-group "$RESOURCE_GROUP" \ - --deployment-name "dall-e-2-20" \ - --model-name "dall-e-2" \ - --model-version "2.0" \ - --model-format "OpenAI" \ - --sku-capacity 1 \ - --sku-name "Standard" - echo "Deploying a dall-e-3 model..." echo "----------------------" az cognitiveservices account deployment create \ diff --git a/langchain4j-parent/pom.xml b/langchain4j-parent/pom.xml index d0db5fe0df1..410efc713c2 100644 --- a/langchain4j-parent/pom.xml +++ b/langchain4j-parent/pom.xml @@ -19,7 +19,7 @@ UTF-8 1714382357 0.17.0 - 1.0.0-beta.8 + 1.0.0-beta.10 11.6.6 12.26.1 12.25.1