From 26f4fa7be721f50f6889ba561b00c3d13d75e3cd Mon Sep 17 00:00:00 2001
From: Alix Hamilton
Date: Fri, 22 Sep 2017 15:25:22 -0700
Subject: [PATCH] BigQuery: replaces table.create() with client.create_table()
 (#4038)

* adds client.create_table()

* removes table.create()

* passes system tests

* fixes rebase conflicts

* fixes coverage
---
 bigquery/google/cloud/bigquery/client.py |  18 ++
 bigquery/google/cloud/bigquery/table.py  |  18 --
 bigquery/tests/system.py                 |  83 ++++-----
 bigquery/tests/unit/test_client.py       | 145 +++++++++++++++
 bigquery/tests/unit/test_table.py        | 214 -----------------------
 5 files changed, 205 insertions(+), 273 deletions(-)

diff --git a/bigquery/google/cloud/bigquery/client.py b/bigquery/google/cloud/bigquery/client.py
index 1bd46e407968..bbeac294680d 100644
--- a/bigquery/google/cloud/bigquery/client.py
+++ b/bigquery/google/cloud/bigquery/client.py
@@ -190,6 +190,24 @@ def create_dataset(self, dataset):
             method='POST', path=path, data=dataset._build_resource())
         return Dataset.from_api_repr(api_response)
 
+    def create_table(self, table):
+        """API call: create a table via a POST request
+
+        See
+        https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/insert
+
+        :type table: :class:`~google.cloud.bigquery.table.Table`
+        :param table: A ``Table`` populated with the desired initial state.
+
+        :rtype: :class:`~google.cloud.bigquery.table.Table`
+        :returns: a new ``Table`` returned from the service.
+        """
+        path = '/projects/%s/datasets/%s/tables' % (
+            table.project, table.dataset_id)
+        api_response = self._connection.api_request(
+            method='POST', path=path, data=table._build_resource())
+        return Table.from_api_repr(api_response, self)
+
     def get_dataset(self, dataset_ref):
         """Fetch the dataset referenced by ``dataset_ref``
 
diff --git a/bigquery/google/cloud/bigquery/table.py b/bigquery/google/cloud/bigquery/table.py
index 7173e8603a8e..03214e52a7be 100644
--- a/bigquery/google/cloud/bigquery/table.py
+++ b/bigquery/google/cloud/bigquery/table.py
@@ -653,24 +653,6 @@ def _build_resource(self):
 
         return resource
 
-    def create(self, client=None):
-        """API call: create the table via a PUT request
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/insert
-
-        :type client: :class:`~google.cloud.bigquery.client.Client` or
-                      ``NoneType``
-        :param client: the client to use. If not passed, falls back to the
-                       ``client`` stored on the current dataset.
- """ - client = self._require_client(client) - path = '/projects/%s/datasets/%s/tables' % ( - self._project, self._dataset_id) - api_response = client._connection.api_request( - method='POST', path=path, data=self._build_resource()) - self._set_properties(api_response) - def exists(self, client=None): """API call: test for the existence of the table via a GET request diff --git a/bigquery/tests/system.py b/bigquery/tests/system.py index de19a7da01c5..81b3219275b2 100644 --- a/bigquery/tests/system.py +++ b/bigquery/tests/system.py @@ -180,18 +180,19 @@ def test_list_datasets(self): def test_create_table(self): dataset = self.temp_dataset(_make_dataset_id('create_table')) - - TABLE_NAME = 'test_table' + table_id = 'test_table' full_name = bigquery.SchemaField('full_name', 'STRING', mode='REQUIRED') age = bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED') - table = Table(dataset.table(TABLE_NAME), schema=[full_name, age], - client=Config.CLIENT) - self.assertFalse(table.exists()) - table.create() + table_arg = Table(dataset.table(table_id), schema=[full_name, age], + client=Config.CLIENT) + self.assertFalse(table_arg.exists()) + + table = retry_403(Config.CLIENT.create_table)(table_arg) self.to_delete.insert(0, table) + self.assertTrue(table.exists()) - self.assertEqual(table.table_id, TABLE_NAME) + self.assertEqual(table.table_id, table_id) def test_get_table_w_public_dataset(self): PUBLIC = 'bigquery-public-data' @@ -227,10 +228,10 @@ def test_list_dataset_tables(self): mode='REQUIRED') age = bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED') for table_name in tables_to_create: - created_table = Table(dataset.table(table_name), - schema=[full_name, age], - client=Config.CLIENT) - created_table.create() + table = Table(dataset.table(table_name), + schema=[full_name, age], + client=Config.CLIENT) + created_table = retry_403(Config.CLIENT.create_table)(table) self.to_delete.insert(0, created_table) # Retrieve the tables. 
@@ -249,10 +250,10 @@ def test_patch_table(self):
         full_name = bigquery.SchemaField('full_name', 'STRING',
                                          mode='REQUIRED')
         age = bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED')
-        table = Table(dataset.table(TABLE_NAME), schema=[full_name, age],
-                      client=Config.CLIENT)
-        self.assertFalse(table.exists())
-        table.create()
+        table_arg = Table(dataset.table(TABLE_NAME), schema=[full_name, age],
+                          client=Config.CLIENT)
+        self.assertFalse(table_arg.exists())
+        table = retry_403(Config.CLIENT.create_table)(table_arg)
         self.to_delete.insert(0, table)
         self.assertTrue(table.exists())
         self.assertIsNone(table.friendly_name)
@@ -268,10 +269,10 @@ def test_update_table(self):
         full_name = bigquery.SchemaField('full_name', 'STRING',
                                          mode='REQUIRED')
         age = bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED')
-        table = Table(dataset.table(TABLE_NAME), schema=[full_name, age],
-                      client=Config.CLIENT)
-        self.assertFalse(table.exists())
-        table.create()
+        table_arg = Table(dataset.table(TABLE_NAME), schema=[full_name, age],
+                          client=Config.CLIENT)
+        self.assertFalse(table_arg.exists())
+        table = retry_403(Config.CLIENT.create_table)(table_arg)
         self.to_delete.insert(0, table)
         self.assertTrue(table.exists())
         voter = bigquery.SchemaField('voter', 'BOOLEAN', mode='NULLABLE')
@@ -309,10 +310,10 @@ def test_insert_data_then_dump_table(self):
                                          mode='REQUIRED')
         age = bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED')
         now = bigquery.SchemaField('now', 'TIMESTAMP')
-        table = Table(dataset.table(TABLE_NAME), schema=[full_name, age, now],
-                      client=Config.CLIENT)
-        self.assertFalse(table.exists())
-        table.create()
+        table_arg = Table(dataset.table(TABLE_NAME),
+                          schema=[full_name, age, now], client=Config.CLIENT)
+        self.assertFalse(table_arg.exists())
+        table = retry_403(Config.CLIENT.create_table)(table_arg)
         self.to_delete.insert(0, table)
         self.assertTrue(table.exists())
 
@@ -346,9 +347,9 @@ def test_load_table_from_local_file_then_dump_table(self):
         full_name = bigquery.SchemaField('full_name', 'STRING',
                                          mode='REQUIRED')
         age = bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED')
-        table = Table(dataset.table(TABLE_NAME), schema=[full_name, age],
-                      client=Config.CLIENT)
-        table.create()
+        table_arg = Table(dataset.table(TABLE_NAME), schema=[full_name, age],
+                          client=Config.CLIENT)
+        table = retry_403(Config.CLIENT.create_table)(table_arg)
         self.to_delete.insert(0, table)
 
         with _NamedTemporaryFile() as temp:
@@ -450,9 +451,9 @@ def test_load_table_from_storage_then_dump_table(self):
         full_name = bigquery.SchemaField('full_name', 'STRING',
                                          mode='REQUIRED')
         age = bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED')
-        table = Table(dataset.table(TABLE_NAME), schema=[full_name, age],
-                      client=Config.CLIENT)
-        table.create()
+        table_arg = Table(dataset.table(TABLE_NAME), schema=[full_name, age],
+                          client=Config.CLIENT)
+        table = retry_403(Config.CLIENT.create_table)(table_arg)
         self.to_delete.insert(0, table)
 
         job = Config.CLIENT.load_table_from_storage(
@@ -652,9 +653,9 @@ def test_job_cancel(self):
         full_name = bigquery.SchemaField('full_name', 'STRING',
                                          mode='REQUIRED')
         age = bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED')
-        table = Table(dataset.table(TABLE_NAME), schema=[full_name, age],
-                      client=Config.CLIENT)
-        table.create()
+        table_arg = Table(dataset.table(TABLE_NAME), schema=[full_name, age],
+                          client=Config.CLIENT)
+        table = retry_403(Config.CLIENT.create_table)(table_arg)
         self.to_delete.insert(0, table)
 
         job = Config.CLIENT.run_async_query(JOB_NAME, QUERY)
@@ -839,9 +840,9 @@ def _load_table_for_dml(self, rows, dataset_id, table_id):
         dataset = self.temp_dataset(dataset_id)
         greeting = bigquery.SchemaField(
             'greeting', 'STRING', mode='NULLABLE')
-        table = Table(dataset.table(table_id), schema=[greeting],
-                      client=Config.CLIENT)
-        table.create()
+        table_arg = Table(dataset.table(table_id), schema=[greeting],
+                          client=Config.CLIENT)
+        table = retry_403(Config.CLIENT.create_table)(table_arg)
         self.to_delete.insert(0, table)
 
         with _NamedTemporaryFile() as temp:
@@ -1228,9 +1229,9 @@ def test_insert_nested_nested(self):
         ]
         table_name = 'test_table'
         dataset = self.temp_dataset(_make_dataset_id('issue_2951'))
-        table = Table(dataset.table(table_name), schema=schema,
-                      client=Config.CLIENT)
-        table.create()
+        table_arg = Table(dataset.table(table_name), schema=schema,
+                          client=Config.CLIENT)
+        table = retry_403(Config.CLIENT.create_table)(table_arg)
         self.to_delete.insert(0, table)
 
         table.insert_data(to_insert)
@@ -1245,9 +1246,9 @@ def test_create_table_insert_fetch_nested_schema(self):
         dataset = self.temp_dataset(
             _make_dataset_id('create_table_nested_schema'))
         schema = _load_json_schema()
-        table = Table(dataset.table(table_name), schema=schema,
-                      client=Config.CLIENT)
-        table.create()
+        table_arg = Table(dataset.table(table_name), schema=schema,
+                          client=Config.CLIENT)
+        table = retry_403(Config.CLIENT.create_table)(table_arg)
         self.to_delete.insert(0, table)
         self.assertTrue(table.exists())
         self.assertEqual(table.table_id, table_name)
diff --git a/bigquery/tests/unit/test_client.py b/bigquery/tests/unit/test_client.py
index 50c324ebfc32..96a9dd240132 100644
--- a/bigquery/tests/unit/test_client.py
+++ b/bigquery/tests/unit/test_client.py
@@ -399,6 +399,151 @@ def test_create_dataset_w_attrs(self):
         self.assertEqual(ds.default_table_expiration_ms, 3600)
         self.assertEqual(ds.labels, LABELS)
 
+    def test_create_table_w_day_partition(self):
+        from google.cloud.bigquery.table import Table
+
+        project = 'PROJECT'
+        dataset_id = 'dataset_id'
+        table_id = 'table-id'
+        path = 'projects/%s/datasets/%s/tables' % (
+            project, dataset_id)
+        creds = _make_credentials()
+        client = self._make_one(project=project, credentials=creds)
+        resource = {
+            'id': '%s:%s:%s' % (project, dataset_id, table_id),
+            'tableReference': {
+                'projectId': project,
+                'datasetId': dataset_id,
+                'tableId': table_id
+            },
+        }
+        conn = client._connection = _Connection(resource)
+        table_ref = client.dataset(dataset_id).table(table_id)
+        table = Table(table_ref, client=client)
+        table.partitioning_type = 'DAY'
+
+        got = client.create_table(table)
+
+        self.assertEqual(len(conn._requested), 1)
+        req = conn._requested[0]
+        self.assertEqual(req['method'], 'POST')
+        self.assertEqual(req['path'], '/%s' % path)
+        sent = {
+            'tableReference': {
+                'projectId': project,
+                'datasetId': dataset_id,
+                'tableId': table_id
+            },
+            'timePartitioning': {'type': 'DAY'},
+        }
+        self.assertEqual(req['data'], sent)
+        self.assertEqual(table.partitioning_type, "DAY")
+        self.assertEqual(got.table_id, table_id)
+
+    def test_create_table_w_day_partition_and_expire(self):
+        from google.cloud.bigquery.table import Table
+
+        project = 'PROJECT'
+        dataset_id = 'dataset_id'
+        table_id = 'table-id'
+        path = 'projects/%s/datasets/%s/tables' % (
+            project, dataset_id)
+        creds = _make_credentials()
+        client = self._make_one(project=project, credentials=creds)
+        resource = {
+            'id': '%s:%s:%s' % (project, dataset_id, table_id),
+            'tableReference': {
+                'projectId': project,
+                'datasetId': dataset_id,
+                'tableId': table_id
+            },
+        }
+        conn = client._connection = _Connection(resource)
+        table_ref = client.dataset(dataset_id).table(table_id)
+        table = Table(table_ref, client=client)
+        table.partitioning_type = 'DAY'
+        table.partition_expiration = 100
+
+        got = client.create_table(table)
+
+        self.assertEqual(len(conn._requested), 1)
+        req = conn._requested[0]
+        self.assertEqual(req['method'], 'POST')
+        self.assertEqual(req['path'], '/%s' % path)
+        sent = {
+            'tableReference': {
+                'projectId': project,
+                'datasetId': dataset_id,
+                'tableId': table_id
+            },
+            'timePartitioning': {'type': 'DAY', 'expirationMs': 100},
+        }
+        self.assertEqual(req['data'], sent)
+        self.assertEqual(table.partitioning_type, "DAY")
+        self.assertEqual(table.partition_expiration, 100)
+        self.assertEqual(got.table_id, table_id)
+
+    def test_create_table_w_schema_and_query(self):
+        from google.cloud.bigquery.table import Table, SchemaField
+
+        project = 'PROJECT'
+        dataset_id = 'dataset_id'
+        table_id = 'table-id'
+        path = 'projects/%s/datasets/%s/tables' % (
+            project, dataset_id)
+        query = 'SELECT * from %s:%s' % (dataset_id, table_id)
+        creds = _make_credentials()
+        client = self._make_one(project=project, credentials=creds)
+        resource = {
+            'id': '%s:%s:%s' % (project, dataset_id, table_id),
+            'tableReference': {
+                'projectId': project,
+                'datasetId': dataset_id,
+                'tableId': table_id
+            },
+            'schema': {'fields': [
+                {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'},
+                {'name': 'age', 'type': 'INTEGER', 'mode': 'REQUIRED'}]
+            },
+            'view': {
+                'query': query,
+                'useLegacySql': True
+            },
+        }
+        schema = [
+            SchemaField('full_name', 'STRING', mode='REQUIRED'),
+            SchemaField('age', 'INTEGER', mode='REQUIRED')
+        ]
+        conn = client._connection = _Connection(resource)
+        table_ref = client.dataset(dataset_id).table(table_id)
+        table = Table(table_ref, schema=schema, client=client)
+        table.view_query = query
+
+        got = client.create_table(table)
+
+        self.assertEqual(len(conn._requested), 1)
+        req = conn._requested[0]
+        self.assertEqual(req['method'], 'POST')
+        self.assertEqual(req['path'], '/%s' % path)
+        sent = {
+            'tableReference': {
+                'projectId': project,
+                'datasetId': dataset_id,
+                'tableId': table_id
+            },
+            'schema': {'fields': [
+                {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'},
+                {'name': 'age', 'type': 'INTEGER', 'mode': 'REQUIRED'}]
+            },
+            'view': {'query': query},
+        }
+        self.assertEqual(req['data'], sent)
+        self.assertEqual(got.table_id, table_id)
+        self.assertEqual(got.project, project)
+        self.assertEqual(got.dataset_id, dataset_id)
+        self.assertEqual(got.schema, schema)
+        self.assertEqual(got.view_query, query)
+
     def test_get_table(self):
         project = 'PROJECT'
         dataset_id = 'dataset_id'
diff --git a/bigquery/tests/unit/test_table.py b/bigquery/tests/unit/test_table.py
index f2c2297d244b..22dce9012188 100644
--- a/bigquery/tests/unit/test_table.py
+++ b/bigquery/tests/unit/test_table.py
@@ -557,135 +557,6 @@ def test_from_api_repr_w_properties(self):
         self.assertIs(table._client, client)
         self._verifyResourceProperties(table, RESOURCE)
 
-    def test_create_new_day_partitioned_table(self):
-        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_ID)
-        RESOURCE = self._makeResource()
-        conn = _Connection(RESOURCE)
-        client = _Client(project=self.PROJECT, connection=conn)
-        dataset = DatasetReference(self.PROJECT, self.DS_ID)
-        table_ref = dataset.table(self.TABLE_NAME)
-        table = self._make_one(table_ref, client=client)
-        table.partitioning_type = 'DAY'
-        table.create()
-
-        self.assertEqual(len(conn._requested), 1)
-        req = conn._requested[0]
-        self.assertEqual(req['method'], 'POST')
-        self.assertEqual(req['path'], '/%s' % PATH)
-        SENT = {
-            'tableReference': {
-                'projectId': self.PROJECT,
-                'datasetId': self.DS_ID,
-                'tableId': self.TABLE_NAME},
-            'timePartitioning': {'type': 'DAY'},
-        }
-        self.assertEqual(req['data'], SENT)
-        self._verifyResourceProperties(table, RESOURCE)
-
-    def test_create_w_bound_client(self):
-        from google.cloud.bigquery.table import SchemaField
-
-        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_ID)
-        RESOURCE = self._makeResource()
-        conn = _Connection(RESOURCE)
-        client = _Client(project=self.PROJECT, connection=conn)
-        dataset = DatasetReference(self.PROJECT, self.DS_ID)
-        table_ref = dataset.table(self.TABLE_NAME)
-        full_name = SchemaField('full_name', 'STRING', mode='REQUIRED')
-        age = SchemaField('age', 'INTEGER', mode='REQUIRED')
-        table = self._make_one(table_ref, schema=[full_name, age],
-                               client=client)
-
-        table.create()
-
-        self.assertEqual(len(conn._requested), 1)
-        req = conn._requested[0]
-        self.assertEqual(req['method'], 'POST')
-        self.assertEqual(req['path'], '/%s' % PATH)
-        SENT = {
-            'tableReference': {
-                'projectId': self.PROJECT,
-                'datasetId': self.DS_ID,
-                'tableId': self.TABLE_NAME},
-            'schema': {'fields': [
-                {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'},
-                {'name': 'age', 'type': 'INTEGER', 'mode': 'REQUIRED'}]},
-        }
-        self.assertEqual(req['data'], SENT)
-        self._verifyResourceProperties(table, RESOURCE)
-
-    def test_create_w_partition_no_expire(self):
-        from google.cloud.bigquery.table import SchemaField
-
-        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_ID)
-        RESOURCE = self._makeResource()
-        conn = _Connection(RESOURCE)
-        client = _Client(project=self.PROJECT, connection=conn)
-        dataset = DatasetReference(self.PROJECT, self.DS_ID)
-        table_ref = dataset.table(self.TABLE_NAME)
-        full_name = SchemaField('full_name', 'STRING', mode='REQUIRED')
-        age = SchemaField('age', 'INTEGER', mode='REQUIRED')
-        table = self._make_one(table_ref, schema=[full_name, age],
-                               client=client)
-
-        self.assertIsNone(table.partitioning_type)
-        table.partitioning_type = "DAY"
-        self.assertEqual(table.partitioning_type, "DAY")
-        table.create()
-
-        self.assertEqual(len(conn._requested), 1)
-        req = conn._requested[0]
-        self.assertEqual(req['method'], 'POST')
-        self.assertEqual(req['path'], '/%s' % PATH)
-        SENT = {
-            'tableReference': {
-                'projectId': self.PROJECT,
-                'datasetId': self.DS_ID,
-                'tableId': self.TABLE_NAME},
-            'timePartitioning': {'type': 'DAY'},
-            'schema': {'fields': [
-                {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'},
-                {'name': 'age', 'type': 'INTEGER', 'mode': 'REQUIRED'}]},
-        }
-        self.assertEqual(req['data'], SENT)
-        self._verifyResourceProperties(table, RESOURCE)
-
-    def test_create_w_partition_and_expire(self):
-        from google.cloud.bigquery.table import SchemaField
-
-        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_ID)
-        RESOURCE = self._makeResource()
-        conn = _Connection(RESOURCE)
-        client = _Client(project=self.PROJECT, connection=conn)
-        dataset = DatasetReference(self.PROJECT, self.DS_ID)
-        table_ref = dataset.table(self.TABLE_NAME)
-        full_name = SchemaField('full_name', 'STRING', mode='REQUIRED')
-        age = SchemaField('age', 'INTEGER', mode='REQUIRED')
-        table = self._make_one(table_ref, schema=[full_name, age],
-                               client=client)
-        self.assertIsNone(table.partition_expiration)
-        table.partition_expiration = 100
-        self.assertEqual(table.partitioning_type, "DAY")
-        self.assertEqual(table.partition_expiration, 100)
-        table.create()
-
-        self.assertEqual(len(conn._requested), 1)
-        req = conn._requested[0]
-        self.assertEqual(req['method'], 'POST')
-        self.assertEqual(req['path'], '/%s' % PATH)
-        SENT = {
-            'tableReference': {
-                'projectId': self.PROJECT,
-                'datasetId': self.DS_ID,
-                'tableId': self.TABLE_NAME},
-            'timePartitioning': {'type': 'DAY', 'expirationMs': 100},
-            'schema': {'fields': [
-                {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'},
-                {'name': 'age', 'type': 'INTEGER', 'mode': 'REQUIRED'}]},
-        }
-        self.assertEqual(req['data'], SENT)
-        self._verifyResourceProperties(table, RESOURCE)
-
     def test_partition_type_setter_bad_type(self):
         from google.cloud.bigquery.table import SchemaField
 
@@ -833,91 +704,6 @@ def test_list_partitions(self):
                               client=client)
         self.assertEqual(table.list_partitions(), [20160804, 20160805])
 
-    def test_create_w_alternate_client(self):
-        import datetime
-        from google.cloud._helpers import UTC
-        from google.cloud._helpers import _millis
-
-        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_ID)
-        DESCRIPTION = 'DESCRIPTION'
-        TITLE = 'TITLE'
-        QUERY = 'select fullname, age from person_ages'
-        RESOURCE = self._makeResource()
-        RESOURCE['description'] = DESCRIPTION
-        RESOURCE['friendlyName'] = TITLE
-        self.EXP_TIME = datetime.datetime(2015, 8, 1, 23, 59, 59,
-                                          tzinfo=UTC)
-        RESOURCE['expirationTime'] = _millis(self.EXP_TIME)
-        RESOURCE['view'] = {}
-        RESOURCE['view']['query'] = QUERY
-        RESOURCE['type'] = 'VIEW'
-        conn1 = _Connection()
-        client1 = _Client(project=self.PROJECT, connection=conn1)
-        conn2 = _Connection(RESOURCE)
-        client2 = _Client(project=self.PROJECT, connection=conn2)
-        dataset = DatasetReference(self.PROJECT, self.DS_ID)
-        table_ref = dataset.table(self.TABLE_NAME)
-        table = self._make_one(table_ref, client=client1)
-        table.friendly_name = TITLE
-        table.description = DESCRIPTION
-        table.view_query = QUERY
-
-        table.create(client=client2)
-
-        self.assertEqual(len(conn1._requested), 0)
-        self.assertEqual(len(conn2._requested), 1)
-        req = conn2._requested[0]
-        self.assertEqual(req['method'], 'POST')
-        self.assertEqual(req['path'], '/%s' % PATH)
-        SENT = {
-            'tableReference': {
-                'projectId': self.PROJECT,
-                'datasetId': self.DS_ID,
-                'tableId': self.TABLE_NAME},
-            'description': DESCRIPTION,
-            'friendlyName': TITLE,
-            'view': {'query': QUERY},
-        }
-        self.assertEqual(req['data'], SENT)
-        self._verifyResourceProperties(table, RESOURCE)
-
-    def test_create_w_missing_output_properties(self):
-        # In the wild, the resource returned from 'dataset.create' sometimes
-        # lacks 'creationTime' / 'lastModifiedTime'
-        from google.cloud.bigquery.table import SchemaField
-
-        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_ID)
-        RESOURCE = self._makeResource()
-        del RESOURCE['creationTime']
-        del RESOURCE['lastModifiedTime']
-        self.WHEN = None
-        conn = _Connection(RESOURCE)
-        client = _Client(project=self.PROJECT, connection=conn)
-        dataset = DatasetReference(self.PROJECT, self.DS_ID)
-        table_ref = dataset.table(self.TABLE_NAME)
-        full_name = SchemaField('full_name', 'STRING', mode='REQUIRED')
-        age = SchemaField('age', 'INTEGER', mode='REQUIRED')
-        table = self._make_one(table_ref, schema=[full_name, age],
-                               client=client)
-
-        table.create()
-
-        self.assertEqual(len(conn._requested), 1)
-        req = conn._requested[0]
-        self.assertEqual(req['method'], 'POST')
-        self.assertEqual(req['path'], '/%s' % PATH)
-        SENT = {
-            'tableReference': {
-                'projectId': self.PROJECT,
-                'datasetId': self.DS_ID,
-                'tableId': self.TABLE_NAME},
-            'schema': {'fields': [
-                {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'},
-                {'name': 'age', 'type': 'INTEGER', 'mode': 'REQUIRED'}]},
-        }
-        self.assertEqual(req['data'], SENT)
-        self._verifyResourceProperties(table, RESOURCE)
-
     def test_exists_miss_w_bound_client(self):
         PATH = 'projects/%s/datasets/%s/tables/%s' % (
             self.PROJECT, self.DS_ID, self.TABLE_NAME)
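
Usage sketch (illustrative, not part of the committed diff): after this change a caller builds the ``Table`` locally and hands it to the client, which issues the POST and returns the server-side representation, mirroring the system tests above. The dataset and table names below are hypothetical:

    from google.cloud import bigquery
    from google.cloud.bigquery.table import Table

    client = bigquery.Client()  # assumes default project and credentials

    dataset_ref = client.dataset('my_dataset')  # hypothetical dataset id
    schema = [
        bigquery.SchemaField('full_name', 'STRING', mode='REQUIRED'),
        bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED'),
    ]

    # Old API (removed by this patch):
    #     table = Table(dataset_ref.table('my_table'), schema=schema,
    #                   client=client)
    #     table.create()
    # New API: the client owns the create call.
    table_arg = Table(dataset_ref.table('my_table'), schema=schema,
                      client=client)
    table = client.create_table(table_arg)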