STS Azure sample (GoogleCloudPlatform#7529)
* STS Azure samples

* use python in run script

* adjust run script

* fix python script

* fix variable reference

* fix lint

* address review comments

* fix azure acc example
JesseLovelace authored Jan 10, 2023
1 parent 4fe195c commit 2090e82
Showing 5 changed files with 189 additions and 25 deletions.
9 changes: 8 additions & 1 deletion .kokoro/tests/run_tests.sh
@@ -69,7 +69,6 @@ if [[ "$SCRIPT_DEBUG" != "true" ]]; then
SECRET_FILES=("java-docs-samples-service-account.json" \
"java-aiplatform-samples-secrets.txt" \
"java-automl-samples-secrets.txt" \
"java-aws-samples-secrets.txt" \
"java-bigtable-samples-secrets.txt" \
"java-cloud-sql-samples-secrets.txt" \
"java-cts-v4-samples-secrets.txt" \
@@ -89,6 +88,14 @@ if [[ "$SCRIPT_DEBUG" != "true" ]]; then
source "${KOKORO_GFILE_DIR}/secrets/$SECRET"
fi
done

# Fetch the storage-transfer AWS and Azure test credentials from Secret Manager
# and export the individual fields the samples and tests expect.
export STS_AWS_SECRET=`gcloud secrets versions access latest --project cloud-devrel-kokoro-resources --secret=java-storagetransfer-aws`
export AWS_ACCESS_KEY_ID=`S="$STS_AWS_SECRET" python3 -c 'import json,os; print(json.loads(os.getenv("S"))["AccessKeyId"])'`
export AWS_SECRET_ACCESS_KEY=`S="$STS_AWS_SECRET" python3 -c 'import json,os; print(json.loads(os.getenv("S"))["SecretAccessKey"])'`
export STS_AZURE_SECRET=`gcloud secrets versions access latest --project cloud-devrel-kokoro-resources --secret=java-storagetransfer-azure`
export AZURE_STORAGE_ACCOUNT=`S="$STS_AZURE_SECRET" python3 -c 'import json,os; print(json.loads(os.getenv("S"))["StorageAccount"])'`
export AZURE_CONNECTION_STRING=`S="$STS_AZURE_SECRET" python3 -c 'import json,os; print(json.loads(os.getenv("S"))["ConnectionString"])'`
export AZURE_SAS_TOKEN=`S="$STS_AZURE_SECRET" python3 -c 'import json,os; print(json.loads(os.getenv("S"))["SAS"])'`

# Activate service account
gcloud auth activate-service-account \
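A note on how these exports are consumed: the Java samples and tests read them back with System.getenv. A minimal sketch of that pattern; the EnvCheck class and requireEnv helper here are illustrative and not part of this commit.

public class EnvCheck {
  // Hypothetical helper (not in this commit): fail fast if a secret
  // was not exported by run_tests.sh.
  static String requireEnv(String name) {
    String value = System.getenv(name);
    if (value == null || value.isEmpty()) {
      throw new IllegalStateException("Missing environment variable: " + name);
    }
    return value;
  }

  public static void main(String[] args) {
    // These names match the exports added above.
    requireEnv("AZURE_SAS_TOKEN");
    requireEnv("AZURE_CONNECTION_STRING");
    String account = requireEnv("AZURE_STORAGE_ACCOUNT");
    System.out.println("Azure test credentials found for account " + account);
  }
}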
11 changes: 11 additions & 0 deletions storage-transfer/pom.xml
@@ -103,5 +103,16 @@
<scope>test</scope>
</dependency>

<dependency>
<groupId>com.azure</groupId>
<artifactId>azure-storage-blob</artifactId>
<version>12.20.1</version>
</dependency>
<dependency>
<groupId>com.azure</groupId>
<artifactId>azure-identity</artifactId>
<version>1.7.2</version>
</dependency>

</dependencies>
</project>
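The azure-storage-blob client is what the integration test below uses; azure-identity is the companion auth library, which the shown diff adds but does not exercise. A hedged sketch of the credential-based alternative it enables; the endpoint URL is a placeholder.

import com.azure.identity.DefaultAzureCredentialBuilder;
import com.azure.storage.blob.BlobServiceClient;
import com.azure.storage.blob.BlobServiceClientBuilder;

public class AzureIdentitySketch {
  public static void main(String[] args) {
    // DefaultAzureCredential resolves credentials from the environment
    // (environment variables, managed identity, Azure CLI login, ...).
    BlobServiceClient client = new BlobServiceClientBuilder()
        .endpoint("https://my-azure-account.blob.core.windows.net") // placeholder account URL
        .credential(new DefaultAzureCredentialBuilder().build())
        .buildClient();
    System.out.println("Connected as " + client.getAccountName());
  }
}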
storage-transfer/src/main/java/com/google/cloud/storage/storagetransfer/samples/QuickstartSample.java
@@ -44,31 +44,36 @@ public static void main(String[] args) throws Exception {
public static void quickStartSample(
String projectId, String gcsSourceBucket, String gcsSinkBucket) throws Exception {

-    StorageTransferServiceClient storageTransfer = StorageTransferServiceClient.create();
+    // Initialize client that will be used to send requests. This client only needs to be created
+    // once, and can be reused for multiple requests. After completing all of your requests, call
+    // the "close" method on the client to safely clean up any remaining background resources,
+    // or use a try-with-resources statement to do this automatically.
+    try (StorageTransferServiceClient storageTransfer = StorageTransferServiceClient.create()) {

-    TransferJob transferJob =
-        TransferJob.newBuilder()
-            .setProjectId(projectId)
-            .setTransferSpec(
-                TransferSpec.newBuilder()
-                    .setGcsDataSource(GcsData.newBuilder().setBucketName(gcsSourceBucket))
-                    .setGcsDataSink(GcsData.newBuilder().setBucketName(gcsSinkBucket)))
-            .setStatus(TransferJob.Status.ENABLED)
-            .build();
+      TransferJob transferJob =
+          TransferJob.newBuilder()
+              .setProjectId(projectId)
+              .setTransferSpec(
+                  TransferSpec.newBuilder()
+                      .setGcsDataSource(GcsData.newBuilder().setBucketName(gcsSourceBucket))
+                      .setGcsDataSink(GcsData.newBuilder().setBucketName(gcsSinkBucket)))
+              .setStatus(TransferJob.Status.ENABLED)
+              .build();

-    TransferJob response =
-        storageTransfer.createTransferJob(
-            CreateTransferJobRequest.newBuilder().setTransferJob(transferJob).build());
+      TransferJob response =
+          storageTransfer.createTransferJob(
+              CreateTransferJobRequest.newBuilder().setTransferJob(transferJob).build());

-    storageTransfer
-        .runTransferJobAsync(
-            RunTransferJobRequest.newBuilder()
-                .setProjectId(projectId)
-                .setJobName(response.getName())
-                .build())
-        .get();
-    System.out.println(
-        "Created and ran transfer job between two GCS buckets with name " + response.getName());
+      storageTransfer
+          .runTransferJobAsync(
+              RunTransferJobRequest.newBuilder()
+                  .setProjectId(projectId)
+                  .setJobName(response.getName())
+                  .build())
+          .get();
+      System.out.println(
+          "Created and ran transfer job between two GCS buckets with name " + response.getName());
+    }
}
}
// [END storagetransfer_quickstart]
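A related usage note, outside this diff: after createTransferJob, the job can be read back to inspect its status with getTransferJob. A minimal sketch using the same proto client; the wrapper class and method names are illustrative.

import com.google.storagetransfer.v1.proto.StorageTransferServiceClient;
import com.google.storagetransfer.v1.proto.TransferProto.GetTransferJobRequest;
import com.google.storagetransfer.v1.proto.TransferTypes.TransferJob;

public class GetTransferJobSketch {
  // Illustrative wrapper: fetch a job by its "transferJobs/..." name.
  public static void printJobStatus(String projectId, String jobName) throws Exception {
    try (StorageTransferServiceClient storageTransfer = StorageTransferServiceClient.create()) {
      TransferJob job =
          storageTransfer.getTransferJob(
              GetTransferJobRequest.newBuilder()
                  .setProjectId(projectId)
                  .setJobName(jobName)
                  .build());
      System.out.println("Job " + job.getName() + " has status " + job.getStatus());
    }
  }
}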
storage-transfer/src/main/java/com/google/cloud/storage/storagetransfer/samples/TransferFromAzure.java (new file)
@@ -0,0 +1,112 @@
/*
* Copyright 2022 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.google.cloud.storage.storagetransfer.samples;

// [START storagetransfer_transfer_from_azure]
import com.google.storagetransfer.v1.proto.StorageTransferServiceClient;
import com.google.storagetransfer.v1.proto.TransferProto;
import com.google.storagetransfer.v1.proto.TransferProto.RunTransferJobRequest;
import com.google.storagetransfer.v1.proto.TransferTypes.AzureBlobStorageData;
import com.google.storagetransfer.v1.proto.TransferTypes.AzureCredentials;
import com.google.storagetransfer.v1.proto.TransferTypes.GcsData;
import com.google.storagetransfer.v1.proto.TransferTypes.TransferJob;
import com.google.storagetransfer.v1.proto.TransferTypes.TransferJob.Status;
import com.google.storagetransfer.v1.proto.TransferTypes.TransferSpec;
import java.io.IOException;
import java.util.concurrent.ExecutionException;

public class TransferFromAzure {
public static void main(String[] args)
throws IOException, ExecutionException, InterruptedException {
// TODO(developer): Replace these variables before running the sample.
// Your Google Cloud Project ID
String projectId = "my-project-id";

// Your Azure Storage Account name
String azureStorageAccount = "my-azure-account";

// The Azure source container to transfer data from
String azureSourceContainer = "my-source-container";

// The GCS bucket to transfer data to
String gcsSinkBucket = "my-sink-bucket";

transferFromAzureBlobStorage(
projectId, azureStorageAccount, azureSourceContainer, gcsSinkBucket);
}

/**
* Creates and runs a transfer job to transfer all data from an Azure container to a GCS bucket.
*/
public static void transferFromAzureBlobStorage(String projectId, String azureStorageAccount,
String azureSourceContainer, String gcsSinkBucket)
throws IOException, ExecutionException, InterruptedException {

// Your Azure SAS token; it should be accessed via an environment variable.
String azureSasToken = System.getenv("AZURE_SAS_TOKEN");

TransferSpec transferSpec = TransferSpec.newBuilder()
.setAzureBlobStorageDataSource(
AzureBlobStorageData.newBuilder()
.setAzureCredentials(AzureCredentials.newBuilder()
.setSasToken(azureSasToken)
.build())
.setContainer(azureSourceContainer)
.setStorageAccount(azureStorageAccount))
.setGcsDataSink(GcsData.newBuilder().setBucketName(gcsSinkBucket).build())
.build();

TransferJob transferJob =
TransferJob.newBuilder()
.setProjectId(projectId)
.setStatus(Status.ENABLED)
.setTransferSpec(transferSpec)
.build();

// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests. After completing all of your requests, call
// the "close" method on the client to safely clean up any remaining background resources,
// or use a try-with-resources statement to do this automatically.
try (StorageTransferServiceClient storageTransfer = StorageTransferServiceClient.create()) {
// Create the transfer job
TransferJob response =
storageTransfer.createTransferJob(TransferProto.CreateTransferJobRequest.newBuilder()
.setTransferJob(transferJob)
.build());

// Run the created job
storageTransfer
.runTransferJobAsync(
RunTransferJobRequest.newBuilder()
.setProjectId(projectId)
.setJobName(response.getName())
.build())
.get();

      System.out.println(
          "Created and ran a transfer job from "
              + azureSourceContainer
              + " to "
              + gcsSinkBucket
              + " with name "
              + response.getName());
}

}
}
// [END storagetransfer_transfer_from_azure]
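Running this sample is only interesting against a container that already holds data. A hedged sketch of seeding one with the azure-storage-blob dependency added above, mirroring the connection-string pattern the integration test below uses; the container and blob names are placeholders.

import com.azure.core.util.BinaryData;
import com.azure.storage.blob.BlobContainerClient;
import com.azure.storage.blob.BlobServiceClient;
import com.azure.storage.blob.BlobServiceClientBuilder;

public class SeedAzureContainer {
  public static void main(String[] args) {
    // Connection string comes from the environment, as in the test setup below.
    BlobServiceClient service =
        new BlobServiceClientBuilder()
            .connectionString(System.getenv("AZURE_CONNECTION_STRING"))
            .buildClient();

    // Placeholder names; createBlobContainer fails if the container exists.
    BlobContainerClient container = service.createBlobContainer("my-source-container");
    container.getBlobClient("hello.txt").upload(BinaryData.fromString("hello, sts"));
  }
}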
ITStoragetransferSamplesTest.java
@@ -24,6 +24,9 @@
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.ObjectListing;
import com.amazonaws.services.s3.model.S3ObjectSummary;
import com.azure.storage.blob.BlobContainerClient;
import com.azure.storage.blob.BlobServiceClient;
import com.azure.storage.blob.BlobServiceClientBuilder;
import com.google.api.services.storagetransfer.v1.Storagetransfer;
import com.google.api.services.storagetransfer.v1.model.Date;
import com.google.api.services.storagetransfer.v1.model.GcsData;
@@ -48,6 +51,7 @@
import com.google.cloud.storage.storagetransfer.samples.QuickstartSample;
import com.google.cloud.storage.storagetransfer.samples.TransferBetweenPosix;
import com.google.cloud.storage.storagetransfer.samples.TransferFromAws;
import com.google.cloud.storage.storagetransfer.samples.TransferFromAzure;
import com.google.cloud.storage.storagetransfer.samples.TransferFromPosix;
import com.google.cloud.storage.storagetransfer.samples.TransferFromS3CompatibleSource;
import com.google.cloud.storage.storagetransfer.samples.TransferToNearline;
@@ -60,17 +64,22 @@
import com.google.cloud.storage.testing.RemoteStorageHelper;
import com.google.cloud.testing.junit4.MultipleAttemptsRule;
import com.google.cloud.testing.junit4.StdOutCaptureRule;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.gson.Gson;
import com.google.storagetransfer.v1.proto.StorageTransferServiceClient;
import com.google.storagetransfer.v1.proto.TransferProto;
import com.google.storagetransfer.v1.proto.TransferProto.GetGoogleServiceAccountRequest;
import com.google.storagetransfer.v1.proto.TransferTypes;
import java.io.Reader;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
@@ -87,8 +96,14 @@ public class ITStoragetransferSamplesTest {
private static final String SINK_GCS_BUCKET = "sts-test-bucket-sink" + UUID.randomUUID();
private static final String SOURCE_GCS_BUCKET = "sts-test-bucket-source" + UUID.randomUUID();
private static final String AMAZON_BUCKET = "sts-amazon-bucket" + UUID.randomUUID();
private static final String AZURE_BUCKET = "sts-azure-bucket" + UUID.randomUUID();
private static String AZURE_CONNECTION_STRING = System.getenv("AZURE_CONNECTION_STRING");
private static String AZURE_STORAGE_ACCOUNT = System.getenv("AZURE_STORAGE_ACCOUNT");
private static String AZURE_SAS_TOKEN = System.getenv("AZURE_SAS_TOKEN");
private static Storage storage;
private static AmazonS3 s3;
private static BlobServiceClient blobServiceClient;
private static BlobContainerClient blobContainerClient;
private static StorageTransferServiceClient sts;

@Rule public MultipleAttemptsRule multipleAttemptsRule = new MultipleAttemptsRule(5);
@@ -131,6 +146,12 @@ public static void beforeClass() throws Exception {
s3 = AmazonS3ClientBuilder.standard().withRegion(Regions.US_WEST_1).build();

s3.createBucket(AMAZON_BUCKET);

blobServiceClient = new BlobServiceClientBuilder()
.connectionString(AZURE_CONNECTION_STRING)
.sasToken(AZURE_SAS_TOKEN)
.buildClient();
blobContainerClient = blobServiceClient.createBlobContainer(AZURE_BUCKET);
}

private static void grantBucketsStsPermissions(String serviceAccount, String bucket)
@@ -213,9 +234,8 @@ public static void afterClass() throws ExecutionException, InterruptedException
RemoteStorageHelper.forceDelete(storage, SINK_GCS_BUCKET, 1, TimeUnit.MINUTES);
RemoteStorageHelper.forceDelete(storage, SOURCE_GCS_BUCKET, 1, TimeUnit.MINUTES);
}

blobContainerClient.delete();
cleanAmazonBucket();

sts.shutdownNow();
}

@@ -474,4 +494,13 @@ public void testTransferFromS3CompatibleSource() throws Exception {
assertThat(sampleOutput).contains("transferJobs/");
deleteTransferJob(sampleOutput);
}

@Test
public void testTransferFromAzure() throws Exception {
TransferFromAzure.transferFromAzureBlobStorage(
PROJECT_ID, AZURE_STORAGE_ACCOUNT, AZURE_BUCKET, SINK_GCS_BUCKET);
String sampleOutput = stdOutCaptureRule.getCapturedOutputAsUtf8String();
assertThat(sampleOutput).contains("transferJobs/");
deleteTransferJob(sampleOutput);
}
}
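The deleteTransferJob helper the new test calls is outside this hunk. The Storage Transfer API has no delete RPC; a job is removed by updating its status to DELETED, so the helper plausibly looks like the sketch below. The output parsing and exact shape are assumptions; sts and PROJECT_ID are the test-class fields shown above, and java.util.regex imports are assumed.

  // Assumed shape of the unshown helper: extract the "transferJobs/..." name
  // from the captured sample output, then mark the job DELETED.
  private void deleteTransferJob(String sampleOutput) {
    java.util.regex.Matcher m =
        java.util.regex.Pattern.compile("transferJobs/[^\\s\"]+").matcher(sampleOutput);
    if (!m.find()) {
      return;
    }
    sts.updateTransferJob(
        TransferProto.UpdateTransferJobRequest.newBuilder()
            .setJobName(m.group())
            .setProjectId(PROJECT_ID)
            .setTransferJob(
                TransferTypes.TransferJob.newBuilder()
                    .setStatus(TransferTypes.TransferJob.Status.DELETED)
                    .build())
            .build());
  }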
