From 9f01c6b63b004e2c9ade82bea267d9b80dbacd38 Mon Sep 17 00:00:00 2001
From: Jon Wayne Parrott
Date: Thu, 29 Sep 2016 20:51:47 -0700
Subject: [PATCH] Move to google-cloud [(#544)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/544)

---
 samples/snippets/async_query.py         | 2 +-
 samples/snippets/export_data_to_gcs.py  | 2 +-
 samples/snippets/load_data_from_file.py | 2 +-
 samples/snippets/load_data_from_gcs.py  | 2 +-
 samples/snippets/requirements.txt       | 2 +-
 samples/snippets/snippets.py            | 8 ++++----
 samples/snippets/snippets_test.py       | 2 +-
 samples/snippets/stream_data.py         | 2 +-
 samples/snippets/sync_query.py          | 2 +-
 9 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/samples/snippets/async_query.py b/samples/snippets/async_query.py
index 0ca324240..37192d156 100755
--- a/samples/snippets/async_query.py
+++ b/samples/snippets/async_query.py
@@ -27,7 +27,7 @@
 import time
 import uuid
 
-from gcloud import bigquery
+from google.cloud import bigquery
 
 
 def async_query(query):
diff --git a/samples/snippets/export_data_to_gcs.py b/samples/snippets/export_data_to_gcs.py
index c9771ea1b..e9037ee01 100644
--- a/samples/snippets/export_data_to_gcs.py
+++ b/samples/snippets/export_data_to_gcs.py
@@ -29,7 +29,7 @@
 import time
 import uuid
 
-from gcloud import bigquery
+from google.cloud import bigquery
 
 
 def export_data_to_gcs(dataset_name, table_name, destination):
diff --git a/samples/snippets/load_data_from_file.py b/samples/snippets/load_data_from_file.py
index cbb015347..b4f851f1a 100644
--- a/samples/snippets/load_data_from_file.py
+++ b/samples/snippets/load_data_from_file.py
@@ -27,7 +27,7 @@
 import argparse
 import time
 
-from gcloud import bigquery
+from google.cloud import bigquery
 
 
 def load_data_from_file(dataset_name, table_name, source_file_name):
diff --git a/samples/snippets/load_data_from_gcs.py b/samples/snippets/load_data_from_gcs.py
index 1a577be64..4aa435fad 100644
--- a/samples/snippets/load_data_from_gcs.py
+++ b/samples/snippets/load_data_from_gcs.py
@@ -29,7 +29,7 @@
 import time
 import uuid
 
-from gcloud import bigquery
+from google.cloud import bigquery
 
 
 def load_data_from_gcs(dataset_name, table_name, source):
diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt
index 97a207d3a..11a303264 100644
--- a/samples/snippets/requirements.txt
+++ b/samples/snippets/requirements.txt
@@ -1 +1 @@
-gcloud==0.18.3
+google-cloud-bigquery==0.20.0
diff --git a/samples/snippets/snippets.py b/samples/snippets/snippets.py
index d201a84cb..0e1f5d4b5 100644
--- a/samples/snippets/snippets.py
+++ b/samples/snippets/snippets.py
@@ -28,8 +28,8 @@
 import time
 import uuid
 
-from gcloud import bigquery
-import gcloud.bigquery.job
+from google.cloud import bigquery
+import google.cloud.bigquery.job
 
 
 def list_projects():
@@ -145,7 +145,7 @@ def list_rows(dataset_name, table_name, project=None):
             break
 
     # Use format to create a simple table.
-    format_string = '{:<16} ' * len(table.schema)
+    format_string = '{!s:<16} ' * len(table.schema)
 
     # Print schema field names
     field_names = [field.name for field in table.schema]
@@ -177,7 +177,7 @@ def copy_table(dataset_name, table_name, new_table_name, project=None):
 
     # Create the table if it doesn't exist.
     job.create_disposition = (
-        gcloud.bigquery.job.CreateDisposition.CREATE_IF_NEEDED)
+        google.cloud.bigquery.job.CreateDisposition.CREATE_IF_NEEDED)
 
     # Start the job.
     job.begin()
diff --git a/samples/snippets/snippets_test.py b/samples/snippets/snippets_test.py
index 372cbc834..35f79af7b 100644
--- a/samples/snippets/snippets_test.py
+++ b/samples/snippets/snippets_test.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from gcloud import bigquery
+from google.cloud import bigquery
 import pytest
 
 import snippets
diff --git a/samples/snippets/stream_data.py b/samples/snippets/stream_data.py
index 5df6be114..a90d432bf 100644
--- a/samples/snippets/stream_data.py
+++ b/samples/snippets/stream_data.py
@@ -29,7 +29,7 @@
 import json
 from pprint import pprint
 
-from gcloud import bigquery
+from google.cloud import bigquery
 
 
 def stream_data(dataset_name, table_name, json_data):
diff --git a/samples/snippets/sync_query.py b/samples/snippets/sync_query.py
index 59007b537..f21270ed0 100755
--- a/samples/snippets/sync_query.py
+++ b/samples/snippets/sync_query.py
@@ -26,7 +26,7 @@
 import argparse
 
 # [START sync_query]
-from gcloud import bigquery
+from google.cloud import bigquery
 
 
 def sync_query(query):
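For context, a minimal sketch (not part of the patch) of what the migrated import path looks like in use, assuming the google-cloud-bigquery==0.20.0 API pinned above; the dataset and table names are placeholders:

    # Before (gcloud==0.18.3):
    #     from gcloud import bigquery
    # After (google-cloud-bigquery==0.20.0), only the import path changes:
    from google.cloud import bigquery

    # Client construction and the dataset/table accessors used by these
    # samples are unchanged by the rename.
    client = bigquery.Client()
    dataset = client.dataset('my_dataset')   # placeholder dataset name
    table = dataset.table('my_table')        # placeholder table name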