
Commit

Bump com.azure:azure-ai-openai from 1.0.0-beta.8 to 1.0.0-beta.10 (langchain4j#1401)

- Bump com.azure:azure-ai-openai from 1.0.0-beta.8 to 1.0.0-beta.10
- Migrate to Sweden Central
- Fix several tests using the same approach as langchain4j#1022
jdubois authored Jul 3, 2024
1 parent e24707a commit b0fa248
Showing 8 changed files with 26 additions and 95 deletions.
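
For orientation before the diff: every integration test below builds its model client the same way, reading the endpoint and API key from environment variables and targeting a named Azure deployment, so the migration to Sweden Central mostly just swaps deployment names. A minimal sketch of that pattern, assuming the dev.langchain4j.model.azure package of the Azure module and a throwaway class name (the deployment name is one of the Sweden Central names introduced by this commit):

import dev.langchain4j.model.azure.AzureOpenAiLanguageModel;
import dev.langchain4j.model.language.LanguageModel;
import dev.langchain4j.model.output.Response;

// Throwaway sketch, not part of the commit: shows the builder pattern the tests share.
class SwedenCentralSmokeCheck {

    public static void main(String[] args) {
        LanguageModel model = AzureOpenAiLanguageModel.builder()
                .endpoint(System.getenv("AZURE_OPENAI_ENDPOINT"))   // same env vars as the tests
                .apiKey(System.getenv("AZURE_OPENAI_KEY"))
                .deploymentName("gpt-35-turbo-instruct-0914")       // Sweden Central deployment name
                .temperature(0.0)
                .maxTokens(20)
                .logRequestsAndResponses(true)
                .build();

        Response<String> response = model.generate("What is the capital of France?");
        System.out.println(response.content());
    }
}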
@@ -80,7 +80,7 @@ void image_should_trigger_content_filter_for_sexual_content() {
AzureOpenAiImageModel model = AzureOpenAiImageModel.builder()
.endpoint(System.getenv("AZURE_OPENAI_ENDPOINT"))
.apiKey(System.getenv("AZURE_OPENAI_KEY"))
.deploymentName("dall-e-3")
.deploymentName("dall-e-3-30")
.logRequestsAndResponses(true)
.build();

@@ -98,7 +98,7 @@ void language_model_should_trigger_content_filter_for_violence() {
LanguageModel model = AzureOpenAiLanguageModel.builder()
.endpoint(System.getenv("AZURE_OPENAI_ENDPOINT"))
.apiKey(System.getenv("AZURE_OPENAI_KEY"))
.deploymentName("gpt-35-turbo-instruct")
.deploymentName("gpt-35-turbo-instruct-0914")
.tokenizer(new AzureOpenAiTokenizer(GPT_3_5_TURBO_INSTRUCT))
.temperature(0.0)
.maxTokens(20)
@@ -170,7 +170,7 @@ void streaming_language_should_trigger_content_filter_for_violence(String deploy
StreamingLanguageModel model = AzureOpenAiStreamingLanguageModel.builder()
.endpoint(System.getenv("AZURE_OPENAI_ENDPOINT"))
.apiKey(System.getenv("AZURE_OPENAI_KEY"))
.deploymentName("gpt-35-turbo-instruct")
.deploymentName("gpt-35-turbo-instruct-0914")
.tokenizer(new AzureOpenAiTokenizer(GPT_3_5_TURBO_INSTRUCT))
.temperature(0.0)
.maxTokens(20)
@@ -25,7 +25,7 @@ class AzureOpenAiEmbeddingModelIT {
EmbeddingModel model = AzureOpenAiEmbeddingModel.builder()
.endpoint(System.getenv("AZURE_OPENAI_ENDPOINT"))
.apiKey(System.getenv("AZURE_OPENAI_KEY"))
.deploymentName("text-embedding-ada-002")
.deploymentName("text-embedding-ada-002-2")
.tokenizer(new AzureOpenAiTokenizer(TEXT_EMBEDDING_ADA_002))
.logRequestsAndResponses(true)
.build();
@@ -73,7 +73,7 @@ void should_embed_in_batches() {

@ParameterizedTest(name = "Testing model {0}")
@EnumSource(value = AzureOpenAiEmbeddingModelName.class,
- mode = EXCLUDE, names = "TEXT_EMBEDDING_ADA_002_2")
+ mode = EXCLUDE, names = {"TEXT_EMBEDDING_ADA_002_2", "TEXT_EMBEDDING_3_SMALL", "TEXT_EMBEDDING_3_SMALL_1", "TEXT_EMBEDDING_3_LARGE", "TEXT_EMBEDDING_ADA_002", "TEXT_EMBEDDING_ADA_002_1"})
void should_support_all_string_model_names(AzureOpenAiEmbeddingModelName modelName) {

// given
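
The widened EXCLUDE list above leans on JUnit 5's @EnumSource filtering: with mode = EXCLUDE, the parameterized test runs once per enum constant except the named ones, so model names that are presumably not deployed in the Sweden Central resource are skipped rather than failing. A self-contained sketch of the mechanism, using a made-up enum instead of AzureOpenAiEmbeddingModelName:

import static org.junit.jupiter.params.provider.EnumSource.Mode.EXCLUDE;

import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.EnumSource;

class EnumSourceExcludeSketch {

    // Illustrative stand-in for AzureOpenAiEmbeddingModelName.
    enum DeployedModel { ADA_002_2, LARGE_1, NOT_DEPLOYED }

    @ParameterizedTest(name = "Testing model {0}")
    @EnumSource(value = DeployedModel.class, mode = EXCLUDE, names = "NOT_DEPLOYED")
    void runs_only_for_deployed_models(DeployedModel model) {
        // Runs for ADA_002_2 and LARGE_1; NOT_DEPLOYED never reaches the test body.
        System.out.println("Would hit the embedding endpoint for " + model);
    }
}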
@@ -15,6 +15,7 @@
import java.util.Base64;

import static org.assertj.core.api.Assertions.assertThat;
+ import static org.junit.jupiter.params.provider.EnumSource.Mode.EXCLUDE;

public class AzureOpenAiImageModelIT {

@@ -26,7 +27,7 @@ void should_generate_image_with_url() {
AzureOpenAiImageModel model = AzureOpenAiImageModel.builder()
.endpoint(System.getenv("AZURE_OPENAI_ENDPOINT"))
.apiKey(System.getenv("AZURE_OPENAI_KEY"))
.deploymentName("dall-e-3")
.deploymentName("dall-e-3-30")
.logRequestsAndResponses(true)
.build();

@@ -50,7 +51,7 @@ void should_generate_image_in_base64() throws IOException {
AzureOpenAiImageModel model = AzureOpenAiImageModel.builder()
.endpoint(System.getenv("AZURE_OPENAI_ENDPOINT"))
.apiKey(System.getenv("AZURE_OPENAI_KEY"))
.deploymentName("dall-e-3")
.deploymentName("dall-e-3-30")
.logRequestsAndResponses(false) // The image is big, so we don't want to log it by default
.responseFormat(ImageGenerationResponseFormat.BASE64.toString())
.build();
@@ -75,7 +76,7 @@ void should_generate_image_in_base64() throws IOException {
}

@ParameterizedTest(name = "Testing model {0}")
- @EnumSource(AzureOpenAiImageModelName.class)
+ @EnumSource(value = AzureOpenAiImageModelName.class, mode = EXCLUDE, names = "DALL_E_3")
void should_support_all_string_model_names(AzureOpenAiImageModelName modelName) {

// given
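
A note on the base64 variant above: request/response logging is turned off because the payload is large, and the java.util.Base64 import at the top of this file suggests the test decodes the returned image data itself. A rough sketch of that decode step with plain JDK calls (the base64 string and file handling here are placeholders; the real test may handle the image differently):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Base64;

class Base64ImageDecodeSketch {

    public static void main(String[] args) throws IOException {
        // Placeholder payload; in the real test this would be the model's base64 image data.
        String base64Data = "iVBORw0KGgo=";

        // Decode and persist so the generated image can be inspected locally.
        byte[] bytes = Base64.getDecoder().decode(base64Data);
        Path image = Files.createTempFile("dall-e-3-30-", ".png");
        Files.write(image, bytes);

        System.out.println("Wrote " + bytes.length + " bytes to " + image);
    }
}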
@@ -22,7 +22,7 @@ class AzureOpenAiLanguageModelIT {
LanguageModel model = AzureOpenAiLanguageModel.builder()
.endpoint(System.getenv("AZURE_OPENAI_ENDPOINT"))
.apiKey(System.getenv("AZURE_OPENAI_KEY"))
.deploymentName("gpt-35-turbo-instruct")
.deploymentName("gpt-35-turbo-instruct-0914")
.tokenizer(new AzureOpenAiTokenizer(GPT_3_5_TURBO_INSTRUCT))
.temperature(0.0)
.maxTokens(20)
@@ -59,7 +59,7 @@ void should_generate_answer_and_finish_reason_length() {

@ParameterizedTest(name = "Testing model {0}")
@EnumSource(value = AzureOpenAiLanguageModelName.class,
mode = EXCLUDE, names = {"TEXT_DAVINCI_002", "TEXT_DAVINCI_002_1"})
mode = EXCLUDE, names = {"GPT_3_5_TURBO_INSTRUCT", "TEXT_DAVINCI_002", "TEXT_DAVINCI_002_1"})
void should_support_all_string_model_names(AzureOpenAiLanguageModelName modelName) {

// given
@@ -47,6 +47,8 @@ class AzureOpenAiStreamingChatModelIT {

Percentage tokenizerPrecision = withPercentage(5);

+ public long STREAMING_TIMEOUT = 120;

@ParameterizedTest(name = "Deployment name {0} using {1} with async client set to {2}")
@CsvSource({
"gpt-4o, gpt-4o, true",
@@ -90,8 +92,8 @@ public void onError(Throwable error) {
}
});

- String answer = futureAnswer.get(30, SECONDS);
- Response<AiMessage> response = futureResponse.get(30, SECONDS);
+ String answer = futureAnswer.get(STREAMING_TIMEOUT, SECONDS);
+ Response<AiMessage> response = futureResponse.get(STREAMING_TIMEOUT, SECONDS);

assertThat(answer).contains("Paris");
assertThat(response.content().text()).isEqualTo(answer);
@@ -154,8 +156,8 @@ public void onError(Throwable error) {
}
});

- String answer = futureAnswer.get(30, SECONDS);
- Response<AiMessage> response = futureResponse.get(30, SECONDS);
+ String answer = futureAnswer.get(STREAMING_TIMEOUT, SECONDS);
+ Response<AiMessage> response = futureResponse.get(STREAMING_TIMEOUT, SECONDS);

assertThat(answer).contains("Paris");
assertThat(response.content().text()).isEqualTo(answer);
@@ -241,7 +243,7 @@ public void onError(Throwable error) {
}
});

- Response<AiMessage> response = futureResponse.get(30, SECONDS);
+ Response<AiMessage> response = futureResponse.get(STREAMING_TIMEOUT, SECONDS);

AiMessage aiMessage = response.content();
assertThat(aiMessage.text()).isNull();
@@ -282,7 +284,7 @@ public void onError(Throwable error) {
}
});

- Response<AiMessage> response2 = futureResponse2.get(30, SECONDS);
+ Response<AiMessage> response2 = futureResponse2.get(STREAMING_TIMEOUT, SECONDS);
AiMessage aiMessage2 = response2.content();

// then
@@ -356,7 +358,7 @@ public void onError(Throwable error) {
}
});

- Response<AiMessage> response = futureResponse.get(30, SECONDS);
+ Response<AiMessage> response = futureResponse.get(STREAMING_TIMEOUT, SECONDS);

AiMessage aiMessage = response.content();
assertThat(aiMessage.text()).isNull();
@@ -402,7 +404,7 @@ public void onError(Throwable error) {
}
});

- Response<AiMessage> response2 = futureResponse2.get(30, SECONDS);
+ Response<AiMessage> response2 = futureResponse2.get(STREAMING_TIMEOUT, SECONDS);
AiMessage aiMessage2 = response2.content();

// then
@@ -560,7 +562,7 @@ public void onComplete(Response<AiMessage> response) {

// when
model.generate(userMessage, handler);
- String content = future.get(5, SECONDS);
+ String content = future.get(STREAMING_TIMEOUT, SECONDS);

// then
assertThat(content).contains("Access denied due to invalid subscription key or wrong API endpoint");
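
All of the timeout edits above follow one pattern: the streaming handler completes a CompletableFuture from its onComplete callback, and the test blocks on that future with the new STREAMING_TIMEOUT field (120 seconds) instead of a hard-coded 30 or 5. A reduced, plain-JDK sketch of the wiring, with a background thread standing in for the streaming callback:

import static java.util.concurrent.TimeUnit.SECONDS;

import java.util.concurrent.CompletableFuture;

class StreamingTimeoutSketch {

    // Counterpart of the STREAMING_TIMEOUT field added in this commit
    // (declared static final here only so main() can use it directly).
    static final long STREAMING_TIMEOUT = 120;

    public static void main(String[] args) throws Exception {
        CompletableFuture<String> futureAnswer = new CompletableFuture<>();

        // In the real tests the streaming handler's onComplete callback completes the future.
        new Thread(() -> futureAnswer.complete("Paris")).start();

        // Waiting up to 120 seconds instead of 30 makes slow streaming responses
        // much less likely to time the test out.
        String answer = futureAnswer.get(STREAMING_TIMEOUT, SECONDS);
        System.out.println(answer);
    }
}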
@@ -22,7 +22,7 @@ class AzureOpenAiStreamingLanguageModelIT {
StreamingLanguageModel model = AzureOpenAiStreamingLanguageModel.builder()
.endpoint(System.getenv("AZURE_OPENAI_ENDPOINT"))
.apiKey(System.getenv("AZURE_OPENAI_KEY"))
.deploymentName("gpt-35-turbo-instruct")
.deploymentName("gpt-35-turbo-instruct-0914")
.tokenizer(new AzureOpenAiTokenizer(GPT_3_5_TURBO_INSTRUCT))
.temperature(0.0)
.maxTokens(20)
@@ -16,9 +16,9 @@

echo "Setting up environment variables..."
echo "----------------------------------"
PROJECT="langchain4j-eastus"
PROJECT="langchain4j-swedencentral"
RESOURCE_GROUP="rg-$PROJECT"
LOCATION="eastus"
LOCATION="swedencentral"
AI_SERVICE="ai-$PROJECT"
TAG="$PROJECT"

@@ -50,18 +50,6 @@ az cognitiveservices account create \
echo "Deploying Chat Models"
echo "====================="

echo "Deploying a gpt-35-turbo-0301 model..."
echo "----------------------"
az cognitiveservices account deployment create \
--name "$AI_SERVICE" \
--resource-group "$RESOURCE_GROUP" \
--deployment-name "gpt-35-turbo-0301" \
--model-name "gpt-35-turbo" \
--model-version "0125" \
--model-format "OpenAI" \
--sku-capacity 1 \
--sku-name "Standard"

echo "Deploying a gpt-35-turbo-0613 model..."
echo "----------------------"
az cognitiveservices account deployment create \
@@ -110,30 +98,6 @@ az cognitiveservices account deployment create \
--sku-capacity 1 \
--sku-name "Standard"

echo "Deploying a gpt-4-0125-preview model..."
echo "----------------------"
az cognitiveservices account deployment create \
--name "$AI_SERVICE" \
--resource-group "$RESOURCE_GROUP" \
--deployment-name "gpt-4-0125-preview" \
--model-name "gpt-4" \
--model-version "0125-preview" \
--model-format "OpenAI" \
--sku-capacity 1 \
--sku-name "Standard"

echo "Deploying a gpt-4-1106-preview model..."
echo "----------------------"
az cognitiveservices account deployment create \
--name "$AI_SERVICE" \
--resource-group "$RESOURCE_GROUP" \
--deployment-name "gpt-4-1106-preview" \
--model-name "gpt-4" \
--model-version "1106-preview" \
--model-format "OpenAI" \
--sku-capacity 1 \
--sku-name "Standard"

echo "Deploying a gpt-4-turbo-2024-04-09 model..."
echo "----------------------"
az cognitiveservices account deployment create \
@@ -186,18 +150,6 @@ az cognitiveservices account deployment create \
echo "Deploying Embedding Models"
echo "=========================="

echo "Deploying a text-embedding-ada-002-1 model..."
echo "----------------------"
az cognitiveservices account deployment create \
--name "$AI_SERVICE" \
--resource-group "$RESOURCE_GROUP" \
--deployment-name "text-embedding-ada-002-1" \
--model-name "text-embedding-ada-002" \
--model-version "1" \
--model-format "OpenAI" \
--sku-capacity 1 \
--sku-name "Standard"

echo "Deploying a text-embedding-ada-002-2 model..."
echo "----------------------"
az cognitiveservices account deployment create \
@@
--sku-capacity 1 \
--sku-name "Standard"

echo "Deploying a text-embedding-3-small-1 model..."
echo "----------------------"
az cognitiveservices account deployment create \
--name "$AI_SERVICE" \
--resource-group "$RESOURCE_GROUP" \
--deployment-name "text-embedding-3-small-1" \
--model-name "text-embedding-3-small" \
--model-version "1" \
--model-format "OpenAI" \
--sku-capacity 1 \
--sku-name "Standard"

echo "Deploying a text-embedding-3-large-1 model..."
echo "----------------------"
az cognitiveservices account deployment create \
@@
echo "Deploying Image Models"
echo "======================"

echo "Deploying a dall-e-3 model..."
echo "----------------------"
az cognitiveservices account deployment create \
--name "$AI_SERVICE" \
--resource-group "$RESOURCE_GROUP" \
--deployment-name "dall-e-2-20" \
--model-name "dall-e-2" \
--model-version "2.0" \
--model-format "OpenAI" \
--sku-capacity 1 \
--sku-name "Standard"

echo "Deploying a dall-e-3 model..."
echo "----------------------"
az cognitiveservices account deployment create \
langchain4j-parent/pom.xml (2 changes: 1 addition & 1 deletion)
@@ -19,7 +19,7 @@
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.build.outputTimestamp>1714382357</project.build.outputTimestamp>
<openai4j.version>0.17.0</openai4j.version>
- <azure-ai-openai.version>1.0.0-beta.8</azure-ai-openai.version>
+ <azure-ai-openai.version>1.0.0-beta.10</azure-ai-openai.version>
<azure-ai-search.version>11.6.6</azure-ai-search.version>
<azure.storage-blob.version>12.26.1</azure.storage-blob.version>
<azure.storage-common.version>12.25.1</azure.storage-common.version>
