Merge remote-tracking branch 'upstream/master' into merge-master-to-77
kasobol-msft committed May 4, 2021
2 parents 42f6398 + e78062b commit 4963f33
Showing 353 changed files with 7,065 additions and 8,240 deletions.
4 changes: 2 additions & 2 deletions eng/spotbugs-aggregate-report/pom.xml
@@ -117,12 +117,12 @@
<dependency>
<groupId>com.microsoft.azure</groupId>
<artifactId>azure-eventhubs</artifactId>
<version>3.3.0-beta.1</version> <!-- {x-version-update;com.microsoft.azure:azure-eventhubs;current} -->
<version>3.2.3</version> <!-- {x-version-update;com.microsoft.azure:azure-eventhubs;current} -->
</dependency>
<dependency>
<groupId>com.microsoft.azure</groupId>
<artifactId>azure-eventhubs-eph</artifactId>
<version>3.3.0-beta.1</version> <!-- {x-version-update;com.microsoft.azure:azure-eventhubs-eph;current} -->
<version>3.2.3</version> <!-- {x-version-update;com.microsoft.azure:azure-eventhubs-eph;current} -->
</dependency>
<dependency>
<groupId>com.microsoft.azure</groupId>
4 changes: 2 additions & 2 deletions eng/versioning/version_data.txt
@@ -24,8 +24,8 @@ com.microsoft.azure.cognitiveservices:azure-cognitiveservices-customvision-predi
com.microsoft.azure.cognitiveservices:azure-cognitiveservices-customvision-training;1.1.0-beta.3;1.1.0-beta.3
com.microsoft.azure.cognitiveservices:azure-cognitiveservices-faceapi;1.1.0-beta.1;1.1.0-beta.1
com.microsoft.azure.cognitiveservices:azure-cognitiveservices-qnamaker;1.0.0-beta.2;1.0.0-beta.3
com.microsoft.azure:azure-eventhubs;3.2.2;3.3.0-beta.1
com.microsoft.azure:azure-eventhubs-eph;3.2.2;3.3.0-beta.1
com.microsoft.azure:azure-eventhubs;3.2.2;3.2.3
com.microsoft.azure:azure-eventhubs-eph;3.2.2;3.2.3
com.microsoft.azure:azure-eventhubs-extensions;3.2.2;3.3.0-beta.1
com.microsoft.azure:azure-keyvault;1.2.4;1.3.0-beta.1
com.microsoft.azure:azure-keyvault-complete;1.2.4;1.3.0-beta.1
@@ -1,6 +1,7 @@
# Release History

## 2.0.0-beta.1 (Unreleased)
## 2.0.0-beta.1 (2021-05-04)
### Breaking Changes
- Change group id from `com.microsoft.azure` to `com.azure.spring`.
- Change artifact id from `spring-cloud-azure-appconfiguration-config-web` to `azure-spring-cloud-appconfiguration-config-web`.
- Added a new push-based refresh method. Two Spring Actuator endpoints have been added: `appconfiguration-refresh` and `appconfiguration-refresh-bus`. The first resets the configuration cache on a single application instance; the second triggers a refresh on all instances subscribed to the same Service Bus.
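As a hedged illustration (not part of this commit), the push-based refresh can be exercised by POSTing to the new Actuator endpoint once it is exposed; the host, port, and default `/actuator` base path below are assumptions:

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class AppConfigurationRefreshTrigger {
    public static void main(String[] args) throws Exception {
        // Assumed host/port and default Actuator base path; adjust to your deployment.
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/actuator/appconfiguration-refresh"))
                .POST(HttpRequest.BodyPublishers.noBody())
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        // A 2xx status means this instance accepted the refresh request.
        System.out.println(response.statusCode());
    }
}
```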
@@ -1,6 +1,12 @@
# Release History

## 2.0.0-beta.1 (Unreleased)
## 2.0.0-beta.1 (2021-05-04)
### Breaking Changes
- Change group id from `com.microsoft.azure` to `com.azure.spring`.
- Change artifact id from `spring-cloud-azure-appconfiguration-config` to `azure-spring-cloud-appconfiguration-config`.
- The format and options of the library configuration have completely changed. See the README in the Starter.
- Use of a Watch Key is now required; see `spring.cloud.azure.appconfiguration.stores[0].monitoring.triggers`.
- Added support for the JSON content type.
- Feature Management config loading is no longer on by default.
- Users can now select multiple groups of keys from one store; see `spring.cloud.azure.appconfiguration.stores[0].selects`. The same default selection applies as before.
- By default, `spring.profiles.active` is used as the label of all filters. This can be overridden using selects. If no profile is set, `\0` (i.e. `(No Label)`) is used.
@@ -1,6 +1,6 @@
# Release History

## 2.0.0-beta.1 (Unreleased)
## 2.0.0-beta.1 (2021-05-04)
### Breaking Changes
- Change group id from `com.microsoft.azure` to `com.azure.spring`.
- Change artifact id from `spring-cloud-azure-feature-management-web` to `azure-spring-cloud-feature-management-web`.
@@ -1,6 +1,7 @@
# Release History

## 2.0.0-beta.1 (Unreleased)
## 2.0.0-beta.1 (2021-05-04)
### Breaking Changes
- Change group id from `com.microsoft.azure` to `com.azure.spring`.
- Change artifact id from `spring-cloud-azure-feature-management` to `azure-spring-cloud-feature-management`.
- New Targeting Feature filter has been added.
@@ -1,6 +1,6 @@
# Release History

## 2.0.0-beta.1 (Unreleased)
## 2.0.0-beta.1 (2021-05-04)
### Breaking Changes
- Change group id from `com.microsoft.azure` to `com.azure.spring`.
- Change artifact id from `spring-cloud-azure-appconfiguration-config` to `azure-spring-cloud-starter-appconfiguration-config`.
36 changes: 35 additions & 1 deletion sdk/communication/test-resources.json
@@ -13,11 +13,33 @@
"defaultValue": "communication",
"type": "string"
},
"communicationServicesEndpointSuffix": {
"defaultValue": ".communication.azure.com",
"type": "string"
},
"testApplicationOid": {
"type": "string",
"metadata": {
"description": "The client OID to grant access to test resources."
}
},
"tenantId": {
"type": "String",
"metadata": {
"description": "The tenant id to which the application and resources belong."
}
},
"testApplicationId": {
"type": "String",
"metadata": {
"description": "The application client id used to run tests."
}
},
"testApplicationSecret": {
"type": "String",
"metadata": {
"description": "The application client secret used to run tests."
}
}
},
"variables": {
@@ -46,13 +68,25 @@
}
],
"outputs": {
"AZURE_TENANT_ID": {
"type": "String",
"value": "[parameters('tenantId')]"
},
"AZURE_CLIENT_ID": {
"type": "String",
"value": "[parameters('testApplicationId')]"
},
"AZURE_CLIENT_SECRET": {
"type": "String",
"value": "[parameters('testApplicationSecret')]"
},
"COMMUNICATION_CONNECTION_STRING": {
"type": "string",
"value": "[listKeys(resourceId('Microsoft.Communication/CommunicationServices',variables('uniqueSubDomainName')), '2020-08-20-preview').primaryConnectionString]"
},
"COMMUNICATION_SERVICE_ENDPOINT": {
"type": "string",
"value": "[concat('https://', parameters('baseName'), '-', parameters('endpointPrefix'), '.communication.azure.com')]"
"value": "[concat('https://', parameters('baseName'), '-', parameters('endpointPrefix'), parameters('communicationServicesEndpointSuffix'))]"
},
"COMMUNICATION_SERVICE_ACCESS_KEY": {
"type": "string",
13 changes: 7 additions & 6 deletions sdk/communication/tests.yml
@@ -15,6 +15,13 @@ parameters:
stages:
- template: /eng/pipelines/templates/stages/archetype-sdk-tests.yml
parameters:
CloudConfig:
Public:
SubscriptionConfigurations:
- $(sub-config-azure-cloud-test-resources)
- $(sub-config-communication-services-cloud-test-resources-common)
- $(sub-config-communication-services-cloud-test-resources-java)
Clouds: Public
Artifacts:
- name: azure-communication-chat
groupId: com.azure
@@ -57,10 +64,4 @@ stages:
EnvVars:
SKIP_PHONENUMBER_INTEGRATION_TESTS: TRUE
SKIP_LIVE_TEST: TRUE
AZURE_SUBSCRIPTION_ID: $(acs-subscription-id)
COMMUNICATION_LIVETEST_CONNECTION_STRING: $(communication-livetest-connection-string)
COMMUNICATION_PHONE_NUMBER: $(communication-livetest-phone-number)
AZURE_TENANT_ID: $(aad-azure-sdk-test-tenant-id)
AZURE_CLIENT_SECRET: $(aad-azure-sdk-test-client-secret)
AZURE_CLIENT_ID: $(aad-azure-sdk-test-client-id)
TEST_PACKAGES_ENABLED: ${{ parameters.TestPackagesEnabled }}
9 changes: 7 additions & 2 deletions sdk/cosmos/azure-cosmos-benchmark/ctl/run_benchmark.sh
@@ -57,8 +57,13 @@ else
diagnostics_threshold_duration=$ctl_diagnostics_threshold_duration
fi

if [ -z "$ctl_number_of_precreated_documents" ]; then
number_of_precreated_documents=10000
else
number_of_precreated_documents=$ctl_number_of_precreated_documents
fi
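# (Equivalent shell shorthand: number_of_precreated_documents=${ctl_number_of_precreated_documents:-10000})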

connection_mode=Direct
number_of_precreated_documents=10000
gateway_connection_poolsize=5

protocol=Tcp
@@ -92,7 +97,7 @@ additional_benchmark_options="$additional_benchmark_options -maxConnectionPoolSi
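# Assumption: when account-name reporting is wanted, ctl_accountNameInGraphiteReporter is set to the
# literal "-accountNameInGraphiteReporter" flag consumed by the benchmark jar; when unset it expands to nothing.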
if [ -z "$ctl_graphite_endpoint" ]; then
java -Xmx8g -Xms8g $jvm_opt -Dcosmos.directModeProtocol=$protocol -Dazure.cosmos.directModeProtocol=$protocol -DCOSMOS.CLIENT_TELEMETRY_ENDPOINT=$ctl_client_telemetry_endpoint -jar "$jar_file" -serviceEndpoint "$service_endpoint" -masterKey "$master_key" -databaseId "$db_name" -collectionId "$col_name" -readWriteQueryPct "$read_write_query_pct" -diagnosticsThresholdDuration "$diagnostics_threshold_duration" -numberOfCollectionForCtl "$number_Of_collection" -throughput $throughput -consistencyLevel $consistency_level -concurrency $concurrency -numberOfOperations $number_of_operations -operation $operation -connectionMode $connection_mode -maxRunningTimeDuration $max_running_time_duration -numberOfPreCreatedDocuments $number_of_precreated_documents $additional_benchmark_options 2>&1 | tee -a "$log_filename"
else
java -Xmx8g -Xms8g $jvm_opt -Dcosmos.directModeProtocol=$protocol -Dazure.cosmos.directModeProtocol=$protocol -DCOSMOS.CLIENT_TELEMETRY_ENDPOINT=$ctl_client_telemetry_endpoint -jar "$jar_file" -serviceEndpoint "$service_endpoint" -masterKey "$master_key" -databaseId "$db_name" -collectionId "$col_name" -readWriteQueryPct "$read_write_query_pct" -diagnosticsThresholdDuration "$diagnostics_threshold_duration" -numberOfCollectionForCtl "$number_Of_collection" -throughput $throughput -consistencyLevel $consistency_level -concurrency $concurrency -numberOfOperations $number_of_operations -operation $operation -connectionMode $connection_mode -maxRunningTimeDuration $max_running_time_duration -graphiteEndpoint $ctl_graphite_endpoint -numberOfPreCreatedDocuments $number_of_precreated_documents $additional_benchmark_options 2>&1 | tee -a "$log_filename"
java -Xmx8g -Xms8g $jvm_opt -Dcosmos.directModeProtocol=$protocol -Dazure.cosmos.directModeProtocol=$protocol -DCOSMOS.CLIENT_TELEMETRY_ENDPOINT=$ctl_client_telemetry_endpoint -jar "$jar_file" -serviceEndpoint "$service_endpoint" -masterKey "$master_key" -databaseId "$db_name" -collectionId "$col_name" -readWriteQueryPct "$read_write_query_pct" -diagnosticsThresholdDuration "$diagnostics_threshold_duration" -numberOfCollectionForCtl "$number_Of_collection" -throughput $throughput -consistencyLevel $consistency_level -concurrency $concurrency -numberOfOperations $number_of_operations -operation $operation -connectionMode $connection_mode -maxRunningTimeDuration $max_running_time_duration -graphiteEndpoint $ctl_graphite_endpoint -numberOfPreCreatedDocuments $number_of_precreated_documents $ctl_accountNameInGraphiteReporter $additional_benchmark_options 2>&1 | tee -a "$log_filename"
fi

end=$(date +%s)
@@ -166,6 +166,9 @@ public Duration convert(String value) {
@Parameter(names = "-testScenario", description = "The test scenario (GET, QUERY) for the LinkedInCtlWorkload")
private String testScenario = "GET";

@Parameter(names = "-accountNameInGraphiteReporter", description = "if set, account name will be appended to the graphite reporter prefix")
private boolean accountNameInGraphiteReporter = false;
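// JCommander treats a boolean @Parameter as an arity-0 flag: passing
// -accountNameInGraphiteReporter on the command line sets this field to true.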

public enum Environment {
Daily, // This is the CTL environment where we run the workload for a fixed number of hours
Staging; // This is the CTL environment where the workload runs as a long-running job
@@ -283,6 +286,10 @@ public boolean isSync() {
return useSync;
}

public boolean isAccountNameInGraphiteReporter() {
return accountNameInGraphiteReporter;
}

public Duration getMaxRunningTimeDuration() {
return maxRunningTimeDuration;
}
@@ -11,6 +11,8 @@
import com.codahale.metrics.graphite.Graphite;
import com.codahale.metrics.graphite.GraphiteReporter;
import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.concurrent.TimeUnit;


@@ -30,8 +32,19 @@ public static ScheduledReporter create(final Configuration configuration,
final Graphite graphite = new Graphite(new InetSocketAddress(
configuration.getGraphiteEndpoint(),
configuration.getGraphiteEndpointPort()));

String graphiteReporterPrefix = configuration.getOperationType().name();
if (configuration.isAccountNameInGraphiteReporter()) {
try {
URI uri = new URI(configuration.getServiceEndpoint());
graphiteReporterPrefix = graphiteReporterPrefix + "-" + uri.getHost().substring(0, uri.getHost().indexOf("."));
} catch (URISyntaxException e) {
// do nothing, graphiteReporterPrefix will be configuration.getOperationType().name()
}
}

return GraphiteReporter.forRegistry(metricsRegistry)
.prefixedWith(configuration.getOperationType().name())
.prefixedWith(graphiteReporterPrefix)
.convertDurationsTo(TimeUnit.MILLISECONDS)
.convertRatesTo(TimeUnit.SECONDS)
.filter(MetricFilter.ALL)
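For illustration only (not part of the diff): with the host-trimming above, a service endpoint such as `https://myaccount.documents.azure.com:443/` yields the account name `myaccount`, so the reporter prefix becomes `<operationType>-myaccount`. A minimal standalone sketch of that parsing, with the endpoint and operation name assumed:

```java
import java.net.URI;
import java.net.URISyntaxException;

public class GraphitePrefixDemo {
    public static void main(String[] args) throws URISyntaxException {
        String operationType = "ReadLatency"; // assumed example operation name
        // Assumed endpoint; the benchmark reads it from configuration.getServiceEndpoint().
        URI uri = new URI("https://myaccount.documents.azure.com:443/");
        String host = uri.getHost();                           // "myaccount.documents.azure.com"
        String account = host.substring(0, host.indexOf('.')); // "myaccount"
        System.out.println(operationType + "-" + account);     // prints "ReadLatency-myaccount"
    }
}
```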
@@ -57,7 +57,7 @@
"spark.sql(\"CREATE DATABASE IF NOT EXISTS cosmosCatalog.{};\".format(cosmosDatabaseName))\n",
"\n",
"# create a cosmos container\n",
"spark.sql(\"CREATE TABLE IF NOT EXISTS cosmosCatalog.{}.{} using cosmos.items TBLPROPERTIES(partitionKeyPath = '/id', manualThroughput = '1100')\".format(cosmosDatabaseName, cosmosContainerName))"
"spark.sql(\"CREATE TABLE IF NOT EXISTS cosmosCatalog.{}.{} using cosmos.oltp TBLPROPERTIES(partitionKeyPath = '/id', manualThroughput = '1100')\".format(cosmosDatabaseName, cosmosContainerName))"
]
},
{
@@ -77,7 +77,7 @@
"spark.createDataFrame(((\"cat-alive\", \"Schrodinger cat\", 2, True), (\"cat-dead\", \"Schrodinger cat\", 2, False)))\\\n",
" .toDF(\"id\",\"name\",\"age\",\"isAlive\") \\\n",
" .write\\\n",
" .format(\"cosmos.items\")\\\n",
" .format(\"cosmos.oltp\")\\\n",
" .options(**cfg)\\\n",
" .mode(\"APPEND\")\\\n",
" .save()"
@@ -97,7 +97,7 @@
"outputs": [],
"source": [
"# Show the schema of the table and data without auto schema inference\n",
"df = spark.read.format(\"cosmos.items\").options(**cfg).load()\n",
"df = spark.read.format(\"cosmos.oltp\").options(**cfg).load()\n",
"df.printSchema()\n",
"\n",
"df.show()"
@@ -117,7 +117,7 @@
"outputs": [],
"source": [
"# Show the schema of the table and data with auto schema inference\n",
"df = spark.read.format(\"cosmos.items\").options(**cfgWithAutoSchemaInference).load()\n",
"df = spark.read.format(\"cosmos.oltp\").options(**cfgWithAutoSchemaInference).load()\n",
"df.printSchema()\n",
"\n",
"df.show()"
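As a hedged aside (not part of the commit), the renamed `cosmos.oltp` data source is reachable from the Java Spark API in the same way; the account endpoint, key, and container values below are placeholders, and the config key names follow the azure-cosmos-spark connector conventions (assumption):

```java
import java.util.HashMap;
import java.util.Map;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class CosmosOltpReadSketch {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().appName("cosmos-oltp-sketch").getOrCreate();

        Map<String, String> cfg = new HashMap<>();
        cfg.put("spark.cosmos.accountEndpoint", "https://<account>.documents.azure.com:443/"); // placeholder
        cfg.put("spark.cosmos.accountKey", "<account-key>");                                   // placeholder
        cfg.put("spark.cosmos.database", "SampleDatabase");
        cfg.put("spark.cosmos.container", "GreenTaxiRecords");

        // Same "cosmos.oltp" format name the notebooks switched to above.
        Dataset<Row> df = spark.read().format("cosmos.oltp").options(cfg).load();
        df.printSchema();
        df.show();
    }
}
```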
@@ -105,16 +105,16 @@
"CREATE DATABASE IF NOT EXISTS cosmosCatalog.SampleDatabase;\n",
"\n",
"CREATE TABLE IF NOT EXISTS cosmosCatalog.SampleDatabase.GreenTaxiRecords\n",
"USING cosmos.items\n",
"USING cosmos.oltp\n",
"TBLPROPERTIES(partitionKeyPath = '/id', autoScaleMaxThroughput = '100000', indexingPolicy = 'OnlySystemProperties');\n",
"\n",
"CREATE TABLE IF NOT EXISTS cosmosCatalog.SampleDatabase.GreenTaxiRecordsCFSink\n",
"USING cosmos.items\n",
"USING cosmos.oltp\n",
"TBLPROPERTIES(partitionKeyPath = '/id', autoScaleMaxThroughput = '100000', indexingPolicy = 'OnlySystemProperties');\n",
"\n",
"/* NOTE: It is important to enable TTL (can be off/-1 by default) on the throughput control container */\n",
"CREATE TABLE IF NOT EXISTS cosmosCatalog.SampleDatabase.ThroughputControl\n",
"USING cosmos.items\n",
"USING cosmos.oltp\n",
"OPTIONS(spark.cosmos.database = 'SampleDatabase')\n",
"TBLPROPERTIES(partitionKeyPath = '/groupId', autoScaleMaxThroughput = '4000', indexingPolicy = 'AllProperties', defaultTtlInSeconds = '-1');"
]
@@ -248,7 +248,7 @@
"\n",
"df_NYCGreenTaxi_Input \\\n",
" .write \\\n",
" .format(\"cosmos.items\") \\\n",
" .format(\"cosmos.oltp\") \\\n",
" .mode(\"Append\") \\\n",
" .options(**writeCfg) \\\n",
" .save()\n",
@@ -324,7 +324,7 @@
" \"spark.cosmos.read.inferSchema.enabled\" : \"false\"\n",
"}\n",
"\n",
"query_df = spark.read.format(\"cosmos.items\").options(**readCfg).load()\n",
"query_df = spark.read.format(\"cosmos.oltp\").options(**readCfg).load()\n",
"count_query = query_df.count()\n",
"print(\"Number of records retrieved via query: \", count_query) \n",
"print(\"Finished validation via query: \", datetime.datetime.utcnow().strftime(\"%Y-%m-%d %H:%M:%S.%f\"))\n",
@@ -501,7 +501,7 @@
"%sql\n",
"CREATE TABLE cosmosCatalog.SampleDatabase.GreenTaxiRecordsView \n",
" (id STRING, _ts TIMESTAMP, vendorID INT, totalAmount DOUBLE)\n",
"USING cosmos.items\n",
"USING cosmos.oltp\n",
"TBLPROPERTIES(isCosmosView = 'True')\n",
"OPTIONS (\n",
" spark.cosmos.database = 'SampleDatabase',\n",
@@ -542,7 +542,7 @@
"source": [
"%sql\n",
"CREATE TABLE cosmosCatalog.SampleDatabase.GreenTaxiRecordsAnotherView \n",
"USING cosmos.items\n",
"USING cosmos.oltp\n",
"TBLPROPERTIES(isCosmosView = 'True')\n",
"OPTIONS (\n",
" spark.cosmos.database = 'SampleDatabase',\n",
@@ -105,16 +105,16 @@
"CREATE DATABASE IF NOT EXISTS cosmosCatalog.SampleDatabase;\n",
"\n",
"CREATE TABLE IF NOT EXISTS cosmosCatalog.SampleDatabase.GreenTaxiRecords\n",
"USING cosmos.items\n",
"USING cosmos.oltp\n",
"TBLPROPERTIES(partitionKeyPath = '/id', autoScaleMaxThroughput = '100000', indexingPolicy = 'OnlySystemProperties');\n",
"\n",
"CREATE TABLE IF NOT EXISTS cosmosCatalog.SampleDatabase.GreenTaxiRecordsCFSink\n",
"USING cosmos.items\n",
"USING cosmos.oltp\n",
"TBLPROPERTIES(partitionKeyPath = '/id', autoScaleMaxThroughput = '100000', indexingPolicy = 'OnlySystemProperties');\n",
"\n",
"/* NOTE: It is important to enable TTL (can be off/-1 by default) on the throughput control container */\n",
"CREATE TABLE IF NOT EXISTS cosmosCatalog.SampleDatabase.ThroughputControl\n",
"USING cosmos.items\n",
"USING cosmos.oltp\n",
"OPTIONS(spark.cosmos.database = 'SampleDatabase')\n",
"TBLPROPERTIES(partitionKeyPath = '/groupId', autoScaleMaxThroughput = '4000', indexingPolicy = 'AllProperties', defaultTtlInSeconds = '-1');"
]
@@ -281,7 +281,7 @@
"\n",
" microBatchQuery = df_withTimestamps \\\n",
" .writeStream \\\n",
" .format(\"cosmos.items\") \\\n",
" .format(\"cosmos.oltp\") \\\n",
" .queryName(runId) \\\n",
" .options(**writeCfg) \\\n",
" .outputMode(\"append\") \\\n",
@@ -332,7 +332,7 @@
"%sql\n",
"CREATE TABLE cosmosCatalog.SampleDatabase.GreenTaxiRecordsCFSinkView \n",
" (id STRING)\n",
"USING cosmos.items\n",
"USING cosmos.oltp\n",
"TBLPROPERTIES(isCosmosView = 'True')\n",
"OPTIONS (\n",
" spark.cosmos.database = 'SampleDatabase',\n",
