diff --git a/bigquery/google/cloud/bigquery/job.py b/bigquery/google/cloud/bigquery/job.py
index f513a98d23cd..da6962daec1b 100644
--- a/bigquery/google/cloud/bigquery/job.py
+++ b/bigquery/google/cloud/bigquery/job.py
@@ -774,7 +774,7 @@ def _build_resource(self):
                     'destinationTable': {
                         'projectId': self.destination.project,
                         'datasetId': self.destination.dataset_id,
-                        'tableId': self.destination.name,
+                        'tableId': self.destination.table_id,
                     },
                 },
             },
@@ -901,7 +901,7 @@ def _build_resource(self):
         source_refs = [{
             'projectId': table.project,
             'datasetId': table.dataset_id,
-            'tableId': table.name,
+            'tableId': table.table_id,
         } for table in self.sources]

         resource = {
@@ -915,7 +915,7 @@ def _build_resource(self):
                     'destinationTable': {
                         'projectId': self.destination.project,
                         'datasetId': self.destination.dataset_id,
-                        'tableId': self.destination.name,
+                        'tableId': self.destination.table_id,
                     },
                 },
             },
@@ -1059,7 +1059,7 @@ def _build_resource(self):
         source_ref = {
             'projectId': self.source.project,
             'datasetId': self.source.dataset_id,
-            'tableId': self.source.name,
+            'tableId': self.source.table_id,
         }

         resource = {
@@ -1248,7 +1248,7 @@ def _destination_table_resource(self):
         return {
             'projectId': self.destination.project,
             'datasetId': self.destination.dataset_id,
-            'tableId': self.destination.name,
+            'tableId': self.destination.table_id,
         }

     def _populate_config_resource_booleans(self, configuration):
diff --git a/bigquery/google/cloud/bigquery/table.py b/bigquery/google/cloud/bigquery/table.py
index e06e79271d0a..f9c07b1e8ee6 100644
--- a/bigquery/google/cloud/bigquery/table.py
+++ b/bigquery/google/cloud/bigquery/table.py
@@ -90,8 +90,8 @@ class Table(object):
     See
     https://cloud.google.com/bigquery/docs/reference/rest/v2/tables

-    :type name: str
-    :param name: the name of the table
+    :type table_id: str
+    :param table_id: the ID of the table

     :type dataset: :class:`google.cloud.bigquery.dataset.Dataset`
     :param dataset: The dataset which contains the table.
@@ -102,8 +102,8 @@ class Table(object):

     _schema = None

-    def __init__(self, name, dataset, schema=()):
-        self.name = name
+    def __init__(self, table_id, dataset, schema=()):
+        self._table_id = table_id
         self._dataset = dataset
         self._properties = {}
         # Let the @property do validation.
@@ -127,14 +127,23 @@ def dataset_id(self):
         """
         return self._dataset.dataset_id

+    @property
+    def table_id(self):
+        """ID of the table.
+
+        :rtype: str
+        :returns: the table ID.
+        """
+        return self._table_id
+
     @property
     def path(self):
         """URL path for the table's APIs.

         :rtype: str
-        :returns: the path based on project and dataste name.
+        :returns: the path based on project, dataset and table IDs.
         """
-        return '%s/tables/%s' % (self._dataset.path, self.name)
+        return '%s/tables/%s' % (self._dataset.path, self.table_id)

     @property
     def schema(self):
@@ -224,11 +233,11 @@ def self_link(self):
         return self._properties.get('selfLink')

     @property
-    def table_id(self):
-        """ID for the table resource.
+    def full_table_id(self):
+        """ID for the table, in the form ``project_id:dataset_id:table_id``.

         :rtype: str, or ``NoneType``
-        :returns: the ID (None until set from the server).
+        :returns: the full ID (None until set from the server).
""" return self._properties.get('id') @@ -463,7 +472,7 @@ def list_partitions(self, client=None): """ query = self._require_client(client).run_sync_query( 'SELECT partition_id from [%s.%s$__PARTITIONS_SUMMARY__]' % - (self.dataset_id, self.name)) + (self.dataset_id, self.table_id)) query.run() return [row[0] for row in query.rows] @@ -484,8 +493,8 @@ def from_api_repr(cls, resource, dataset): 'tableId' not in resource['tableReference']): raise KeyError('Resource lacks required identity information:' '["tableReference"]["tableId"]') - table_name = resource['tableReference']['tableId'] - table = cls(table_name, dataset=dataset) + table_id = resource['tableReference']['tableId'] + table = cls(table_id, dataset=dataset) table._set_properties(resource) return table @@ -528,7 +537,7 @@ def _build_resource(self): 'tableReference': { 'projectId': self._dataset.project, 'datasetId': self._dataset.dataset_id, - 'tableId': self.name}, + 'tableId': self.table_id}, } if self.description is not None: resource['description'] = self.description @@ -1181,7 +1190,7 @@ def upload_from_file(self, _maybe_rewind(file_obj, rewind=rewind) _check_mode(file_obj) metadata = _get_upload_metadata( - source_format, self._schema, self._dataset, self.name) + source_format, self._schema, self._dataset, self.table_id) _configure_job_metadata(metadata, allow_jagged_rows, allow_quoted_newlines, create_disposition, encoding, field_delimiter, @@ -1346,7 +1355,7 @@ def _get_upload_headers(user_agent): } -def _get_upload_metadata(source_format, schema, dataset, name): +def _get_upload_metadata(source_format, schema, dataset, table_id): """Get base metadata for creating a table. :type source_format: str @@ -1359,8 +1368,8 @@ def _get_upload_metadata(source_format, schema, dataset, name): :type dataset: :class:`~google.cloud.bigquery.dataset.Dataset` :param dataset: A dataset which contains a table. - :type name: str - :param name: The name of the table. + :type table_id: str + :param table_id: The table_id of the table. :rtype: dict :returns: The metadata dictionary. 
@@ -1370,7 +1379,7 @@ def _get_upload_metadata(source_format, schema, dataset, table_id):
         'destinationTable': {
             'projectId': dataset.project,
             'datasetId': dataset.dataset_id,
-            'tableId': name,
+            'tableId': table_id,
         },
     }
     if schema:
diff --git a/bigquery/tests/unit/test_dataset.py b/bigquery/tests/unit/test_dataset.py
index e1db93a973e4..0689e93b0f20 100644
--- a/bigquery/tests/unit/test_dataset.py
+++ b/bigquery/tests/unit/test_dataset.py
@@ -763,7 +763,7 @@ def test_list_tables_defaults(self):
         self.assertEqual(len(tables), len(DATA['tables']))
         for found, expected in zip(tables, DATA['tables']):
             self.assertIsInstance(found, Table)
-            self.assertEqual(found.table_id, expected['id'])
+            self.assertEqual(found.full_table_id, expected['id'])
             self.assertEqual(found.table_type, expected['type'])

         self.assertEqual(token, TOKEN)
@@ -810,7 +810,7 @@ def test_list_tables_explicit(self):
         self.assertEqual(len(tables), len(DATA['tables']))
         for found, expected in zip(tables, DATA['tables']):
             self.assertIsInstance(found, Table)
-            self.assertEqual(found.table_id, expected['id'])
+            self.assertEqual(found.full_table_id, expected['id'])
             self.assertEqual(found.table_type, expected['type'])

         self.assertIsNone(token)
@@ -827,9 +827,9 @@ def test_table_wo_schema(self):
         conn = _Connection({})
         client = _Client(project=self.PROJECT, connection=conn)
         dataset = self._make_one(self.DS_ID, client=client)
-        table = dataset.table('table_name')
+        table = dataset.table('table_id')
         self.assertIsInstance(table, Table)
-        self.assertEqual(table.name, 'table_name')
+        self.assertEqual(table.table_id, 'table_id')
         self.assertIs(table._dataset, dataset)
         self.assertEqual(table.schema, [])

@@ -842,9 +842,9 @@ def test_table_w_schema(self):
         dataset = self._make_one(self.DS_ID, client=client)
         full_name = SchemaField('full_name', 'STRING', mode='REQUIRED')
         age = SchemaField('age', 'INTEGER', mode='REQUIRED')
-        table = dataset.table('table_name', schema=[full_name, age])
+        table = dataset.table('table_id', schema=[full_name, age])
         self.assertIsInstance(table, Table)
-        self.assertEqual(table.name, 'table_name')
+        self.assertEqual(table.table_id, 'table_id')
         self.assertIs(table._dataset, dataset)
         self.assertEqual(table.schema, [full_name, age])

diff --git a/bigquery/tests/unit/test_job.py b/bigquery/tests/unit/test_job.py
index 09b57d7b7457..7c9a84f4503a 100644
--- a/bigquery/tests/unit/test_job.py
+++ b/bigquery/tests/unit/test_job.py
@@ -83,7 +83,7 @@ class _Base(object):
     PROJECT = 'project'
     SOURCE1 = 'http://example.com/source1.csv'
     DS_ID = 'datset_id'
-    TABLE_NAME = 'table_name'
+    TABLE_ID = 'table_id'
     JOB_NAME = 'job_name'

     def _make_one(self, *args, **kw):
@@ -207,7 +207,7 @@ def _makeResource(self, started=False, ended=False):
         config['destinationTable'] = {
             'projectId': self.PROJECT,
             'datasetId': self.DS_ID,
-            'tableId': self.TABLE_NAME,
+            'tableId': self.TABLE_ID,
         }

         if ended:
@@ -276,7 +276,7 @@ def _verifyResourceProperties(self, job, resource):
         table_ref = config['destinationTable']
         self.assertEqual(job.destination.project, table_ref['projectId'])
         self.assertEqual(job.destination.dataset_id, table_ref['datasetId'])
-        self.assertEqual(job.destination.name, table_ref['tableId'])
+        self.assertEqual(job.destination.table_id, table_ref['tableId'])

         if 'fieldDelimiter' in config:
             self.assertEqual(job.field_delimiter,
@@ -544,7 +544,7 @@ def test_from_api_repr_bare(self):
                 'destinationTable': {
                     'projectId': self.PROJECT,
                     'datasetId': self.DS_ID,
-                    'tableId': self.TABLE_NAME,
+                    'tableId': self.TABLE_ID,
                 },
             }
         },
@@ -604,7 +604,7 @@ def test_begin_w_bound_client(self):
                 'destinationTable': {
                     'projectId': self.PROJECT,
                     'datasetId': self.DS_ID,
-                    'tableId': self.TABLE_NAME,
+                    'tableId': self.TABLE_ID,
                 },
             },
         },
@@ -639,7 +639,7 @@ def test_begin_w_autodetect(self):
                 'destinationTable': {
                     'projectId': self.PROJECT,
                     'datasetId': self.DS_ID,
-                    'tableId': self.TABLE_NAME,
+                    'tableId': self.TABLE_ID,
                 },
                 'autodetect': True
             },
@@ -663,7 +663,7 @@ def test_begin_w_alternate_client(self):
             'destinationTable': {
                 'projectId': self.PROJECT,
                 'datasetId': self.DS_ID,
-                'tableId': self.TABLE_NAME,
+                'tableId': self.TABLE_ID,
             },
             'allowJaggedRows': True,
             'allowQuotedNewlines': True,
@@ -867,7 +867,7 @@ def _verifyResourceProperties(self, job, resource):
         table_ref = config['destinationTable']
         self.assertEqual(job.destination.project, table_ref['projectId'])
         self.assertEqual(job.destination.dataset_id, table_ref['datasetId'])
-        self.assertEqual(job.destination.name, table_ref['tableId'])
+        self.assertEqual(job.destination.table_id, table_ref['tableId'])

         sources = config.get('sourceTables')
         if sources is None:
@@ -876,7 +876,7 @@ def _verifyResourceProperties(self, job, resource):
         for table_ref, table in zip(sources, job.sources):
             self.assertEqual(table.project, table_ref['projectId'])
             self.assertEqual(table.dataset_id, table_ref['datasetId'])
-            self.assertEqual(table.name, table_ref['tableId'])
+            self.assertEqual(table.table_id, table_ref['tableId'])

         if 'createDisposition' in config:
             self.assertEqual(job.create_disposition,
@@ -1219,7 +1219,7 @@ def _verifyResourceProperties(self, job, resource):
         table_ref = config['sourceTable']
         self.assertEqual(job.source.project, table_ref['projectId'])
         self.assertEqual(job.source.dataset_id, table_ref['datasetId'])
-        self.assertEqual(job.source.name, table_ref['tableId'])
+        self.assertEqual(job.source.table_id, table_ref['tableId'])

         if 'compression' in config:
             self.assertEqual(job.compression,
@@ -1614,7 +1614,7 @@ def _verifyResourceProperties(self, job, resource):
             tb_ref = {
                 'projectId': table.project,
                 'datasetId': table.dataset_id,
-                'tableId': table.name
+                'tableId': table.table_id
             }
             self.assertEqual(tb_ref, query_config['destinationTable'])
         else:
@@ -1934,21 +1934,21 @@ def test_referenced_tables(self):
         local1, local2, remote = job.referenced_tables

         self.assertIsInstance(local1, Table)
-        self.assertEqual(local1.name, 'local1')
+        self.assertEqual(local1.table_id, 'local1')
         self.assertIsInstance(local1._dataset, Dataset)
         self.assertEqual(local1.dataset_id, 'dataset')
         self.assertEqual(local1.project, self.PROJECT)
         self.assertIs(local1._dataset._client, client)

         self.assertIsInstance(local2, Table)
-        self.assertEqual(local2.name, 'local2')
+        self.assertEqual(local2.table_id, 'local2')
         self.assertIsInstance(local2._dataset, Dataset)
         self.assertEqual(local2.dataset_id, 'dataset')
         self.assertEqual(local2.project, self.PROJECT)
         self.assertIs(local2._dataset._client, client)

         self.assertIsInstance(remote, Table)
-        self.assertEqual(remote.name, 'other-table')
+        self.assertEqual(remote.table_id, 'other-table')
         self.assertIsInstance(remote._dataset, Dataset)
         self.assertEqual(remote.dataset_id, 'other-dataset')
         self.assertEqual(remote.project, 'other-project-123')
@@ -2706,14 +2706,14 @@ def _get_query_results(self, job_id):

 class _Table(object):

-    def __init__(self, name=None):
-        self._name = name
+    def __init__(self, table_id=None):
+        self._table_id = table_id

     @property
-    def name(self):
-        if self._name is not None:
-            return self._name
-        return TestLoadJob.TABLE_NAME
+    def table_id(self):
+        if self._table_id is not None:
+            return self._table_id
+        return TestLoadJob.TABLE_ID

     @property
     def project(self):
diff --git a/bigquery/tests/unit/test_table.py b/bigquery/tests/unit/test_table.py
index cb481eac1932..f076c6d39938 100644
--- a/bigquery/tests/unit/test_table.py
+++ b/bigquery/tests/unit/test_table.py
@@ -80,7 +80,7 @@ def _setUpConstants(self):
         self.WHEN = datetime.datetime.utcfromtimestamp(self.WHEN_TS).replace(
             tzinfo=UTC)
         self.ETAG = 'ETAG'
-        self.TABLE_ID = '%s:%s:%s' % (
+        self.TABLE_FULL_ID = '%s:%s:%s' % (
             self.PROJECT, self.DS_ID, self.TABLE_NAME)
         self.RESOURCE_URL = 'http://example.com/path/to/resource'
         self.NUM_BYTES = 12345
@@ -98,7 +98,7 @@ def _makeResource(self):
                 {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'},
                 {'name': 'age', 'type': 'INTEGER', 'mode': 'REQUIRED'}]},
             'etag': 'ETAG',
-            'id': self.TABLE_ID,
+            'id': self.TABLE_FULL_ID,
             'lastModifiedTime': self.WHEN_TS * 1000,
             'location': 'US',
             'selfLink': self.RESOURCE_URL,
@@ -133,7 +133,7 @@ def _verifyReadonlyResourceProperties(self, table, resource):
         else:
             self.assertIsNone(table.self_link)

-        self.assertEqual(table.table_id, self.TABLE_ID)
+        self.assertEqual(table.full_table_id, self.TABLE_FULL_ID)
         self.assertEqual(table.table_type,
                          'TABLE' if 'view' not in resource else 'VIEW')

@@ -168,7 +168,7 @@ def test_ctor(self):
         client = _Client(self.PROJECT)
         dataset = _Dataset(client)
         table = self._make_one(self.TABLE_NAME, dataset)
-        self.assertEqual(table.name, self.TABLE_NAME)
+        self.assertEqual(table.table_id, self.TABLE_NAME)
         self.assertIs(table._dataset, dataset)
         self.assertEqual(table.project, self.PROJECT)
         self.assertEqual(table.dataset_id, self.DS_ID)
@@ -184,7 +184,7 @@ def test_ctor(self):
         self.assertIsNone(table.num_bytes)
         self.assertIsNone(table.num_rows)
         self.assertIsNone(table.self_link)
-        self.assertIsNone(table.table_id)
+        self.assertIsNone(table.full_table_id)
         self.assertIsNone(table.table_type)
         self.assertIsNone(table.description)

@@ -284,7 +284,7 @@ def test_props_set_by_server(self):
         CREATED = datetime.datetime(2015, 7, 29, 12, 13, 22, tzinfo=UTC)
         MODIFIED = datetime.datetime(2015, 7, 29, 14, 47, 15, tzinfo=UTC)
-        TABLE_ID = '%s:%s:%s' % (
+        TABLE_FULL_ID = '%s:%s:%s' % (
             self.PROJECT, self.DS_ID, self.TABLE_NAME)
         URL = 'http://example.com/projects/%s/datasets/%s/tables/%s' % (
             self.PROJECT, self.DS_ID, self.TABLE_NAME)

@@ -297,7 +297,7 @@ def test_props_set_by_server(self):
         table._properties['numBytes'] = 12345
         table._properties['numRows'] = 66
         table._properties['selfLink'] = URL
-        table._properties['id'] = TABLE_ID
+        table._properties['id'] = TABLE_FULL_ID
         table._properties['type'] = 'TABLE'

         self.assertEqual(table.created, CREATED)
@@ -306,7 +306,7 @@ def test_props_set_by_server(self):
         self.assertEqual(table.num_bytes, 12345)
         self.assertEqual(table.num_rows, 66)
         self.assertEqual(table.self_link, URL)
-        self.assertEqual(table.table_id, TABLE_ID)
+        self.assertEqual(table.full_table_id, TABLE_FULL_ID)
         self.assertEqual(table.table_type, 'TABLE')

     def test_description_setter_bad_value(self):
@@ -431,7 +431,7 @@ def test_from_api_repr_bare(self):
         }
         klass = self._get_target_class()
         table = klass.from_api_repr(RESOURCE, dataset)
-        self.assertEqual(table.name, self.TABLE_NAME)
+        self.assertEqual(table.table_id, self.TABLE_NAME)
         self.assertIs(table._dataset, dataset)
         self._verifyResourceProperties(table, RESOURCE)

@@ -1681,7 +1681,7 @@ def _initiate_resumable_upload_helper(self, num_retries=None):
         data = b'goodbye gudbi gootbee'
         stream = io.BytesIO(data)
         metadata = _get_upload_metadata(
-            'CSV', table._schema, table._dataset, table.name)
+            'CSV', table._schema, table._dataset, table.table_id)
         upload, transport = table._initiate_resumable_upload(
             client, stream, metadata, num_retries)

@@ -1747,7 +1747,7 @@ def _do_multipart_upload_success_helper(
         data = b'Bzzzz-zap \x00\x01\xf4'
         stream = io.BytesIO(data)
         metadata = _get_upload_metadata(
-            'CSV', table._schema, table._dataset, table.name)
+            'CSV', table._schema, table._dataset, table.table_id)
         size = len(data)
         response = table._do_multipart_upload(
             client, stream, metadata, size, num_retries)
@@ -1899,7 +1899,7 @@ def test_upload_file_resumable_metadata(self):
             'destinationTable': {
                 'projectId': table._dataset._client.project,
                 'datasetId': table.dataset_id,
-                'tableId': table.name,
+                'tableId': table.table_id,
             },
             'allowJaggedRows': config_args['allow_jagged_rows'],
             'allowQuotedNewlines':
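To make the rename concrete for reviewers, here is a minimal sketch of how the `Table` surface reads after this patch. It assumes the pre-0.28-style client used throughout this diff (`Client.dataset()` returning a `Dataset` with a `.table()` factory); the project, dataset, and table names are placeholders, not values from this PR.

```python
# Sketch only: illustrates the rename in this diff, not a supported sample.
from google.cloud import bigquery

client = bigquery.Client(project='my-project')  # placeholder project ID
dataset = client.dataset('my_dataset')          # placeholder dataset name
table = dataset.table('my_table')               # placeholder table name

# Before this patch the constructor argument was exposed as `table.name`;
# after it, the same value is `table.table_id`.
assert table.table_id == 'my_table'

# The old `table.table_id` property (the server-assigned composite ID of the
# form 'project_id:dataset_id:table_id') is now `table.full_table_id`. It
# stays None until the table's metadata has been fetched from the server,
# e.g. via reload().
print(table.full_table_id)  # None for a table not yet synced with the server
```

The split keeps the user-supplied identifier (`table_id`) and the server-generated composite ID (`full_table_id`) from sharing one name, mirroring the `tableId` field of the REST API's `tableReference`.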