BigQuery: Remove dataset property from TableReference and add project/dataset_id properties (#4011)

* Add dataset_id and project properties to TableReference

* Remove dataset property from Table and TableReference
alixhami authored and tswast committed Oct 16, 2017
1 parent a200750 commit ca10fee
Showing 6 changed files with 64 additions and 44 deletions.
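
The net effect, before the per-file diffs: TableReference (and Table) now copy the project and dataset ID off the dataset reference at construction time and expose them as plain string properties, instead of exposing the reference itself through a `dataset` property. A minimal sketch of the new surface, assuming only the classes visible in this diff (DatasetReference from google.cloud.bigquery.dataset, TableReference from google.cloud.bigquery.table):

    # Sketch under the module layout at this commit; not authoritative docs.
    from google.cloud.bigquery.dataset import DatasetReference
    from google.cloud.bigquery.table import TableReference

    dataset_ref = DatasetReference('some-project-1', 'dataset_1')
    table_ref = TableReference(dataset_ref, 'table_1')

    # Before: table_ref.dataset.project / table_ref.dataset.dataset_id
    # After: the IDs are plain strings on the reference itself.
    assert table_ref.project == 'some-project-1'
    assert table_ref.dataset_id == 'dataset_1'
    assert table_ref.table_id == 'table_1'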
4 changes: 2 additions & 2 deletions bigquery/google/cloud/bigquery/job.py
@@ -1119,8 +1119,8 @@ def _build_resource(self):
         """Generate a resource for :meth:`begin`."""

         source_ref = {
-            'projectId': self.source.dataset.project,
-            'datasetId': self.source.dataset.dataset_id,
+            'projectId': self.source.project,
+            'datasetId': self.source.dataset_id,
             'tableId': self.source.table_id,
         }
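With the `dataset` property gone, the job's resource builder above (the extract job, per the sourceTable assertions in test_job.py later in this commit) reads the IDs straight off the source table reference. A hedged sketch of the equivalent logic as a standalone function (`table_reference_resource` is hypothetical, not a library helper); any object exposing `project`, `dataset_id`, and `table_id` strings would do as `source`:

    def table_reference_resource(source):
        # Mirrors the 'sourceTable' payload assembled by _build_resource above.
        return {
            'projectId': source.project,
            'datasetId': source.dataset_id,
            'tableId': source.table_id,
        }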
57 changes: 37 additions & 20 deletions bigquery/google/cloud/bigquery/table.py
@@ -64,17 +64,27 @@ class TableReference(object):
     """

     def __init__(self, dataset_ref, table_id):
-        self._dataset_ref = dataset_ref
+        self._project = dataset_ref.project
+        self._dataset_id = dataset_ref.dataset_id
         self._table_id = table_id

     @property
-    def dataset(self):
-        """Pointer to the dataset.
+    def project(self):
+        """Project bound to the table.

-        :rtype: :class:`google.cloud.bigquery.dataset.DatasetReference`
-        :returns: a pointer to the dataset.
+        :rtype: str
+        :returns: the project (derived from the dataset reference).
         """
-        return self._dataset_ref
+        return self._project
+
+    @property
+    def dataset_id(self):
+        """ID of dataset containing the table.
+
+        :rtype: str
+        :returns: the ID (derived from the dataset reference).
+        """
+        return self._dataset_id

     @property
     def table_id(self):
@@ -92,7 +102,8 @@ def path(self):
         :rtype: str
         :returns: the path based on project, dataset and table IDs.
         """
-        return '%s/tables/%s' % (self._dataset_ref.path, self._table_id)
+        return '/projects/%s/datasets/%s/tables/%s' % (
+            self._project, self._dataset_id, self._table_id)


 class Table(object):
@@ -111,8 +122,9 @@ class Table(object):
     _schema = None

     def __init__(self, table_ref, schema=(), client=None):
+        self._project = table_ref.project
         self._table_id = table_ref.table_id
-        self._dataset = table_ref.dataset
+        self._dataset_id = table_ref.dataset_id
         self._properties = {}
         # Let the @property do validation.
         self.schema = schema
@@ -125,7 +137,7 @@ def project(self):
         :rtype: str
         :returns: the project (derived from the dataset).
         """
-        return self._dataset.project
+        return self._project

     @property
     def dataset_id(self):
@@ -134,7 +146,7 @@ def dataset_id(self):
         :rtype: str
         :returns: the ID (derived from the dataset).
         """
-        return self._dataset.dataset_id
+        return self._dataset_id

     @property
     def table_id(self):
@@ -152,7 +164,8 @@ def path(self):
         :rtype: str
         :returns: the path based on project, dataset and table IDs.
         """
-        return '%s/tables/%s' % (self._dataset.path, self.table_id)
+        return '/projects/%s/datasets/%s/tables/%s' % (
+            self._project, self._dataset_id, self._table_id)

     @property
     def schema(self):
@@ -550,8 +563,8 @@ def _build_resource(self):
         """Generate a resource for ``create`` or ``update``."""
         resource = {
             'tableReference': {
-                'projectId': self._dataset.project,
-                'datasetId': self._dataset.dataset_id,
+                'projectId': self._project,
+                'datasetId': self._dataset_id,
                 'tableId': self.table_id},
         }
         if self.description is not None:
@@ -596,7 +609,7 @@ def create(self, client=None):
         """
         client = self._require_client(client)
         path = '/projects/%s/datasets/%s/tables' % (
-            self._dataset.project, self._dataset.dataset_id)
+            self._project, self._dataset_id)
         api_response = client._connection.api_request(
             method='POST', path=path, data=self._build_resource())
         self._set_properties(api_response)
@@ -1188,7 +1201,8 @@ def upload_from_file(self,
         _maybe_rewind(file_obj, rewind=rewind)
         _check_mode(file_obj)
         metadata = _get_upload_metadata(
-            source_format, self._schema, self._dataset, self.table_id)
+            source_format, self._schema, self._project,
+            self._dataset_id, self.table_id)
         _configure_job_metadata(metadata, allow_jagged_rows,
                                 allow_quoted_newlines, create_disposition,
                                 encoding, field_delimiter,
@@ -1353,7 +1367,7 @@ def _get_upload_headers(user_agent):
     }


-def _get_upload_metadata(source_format, schema, dataset, table_id):
+def _get_upload_metadata(source_format, schema, project, dataset_id, table_id):
     """Get base metadata for creating a table.

     :type source_format: str
@@ -1363,8 +1377,11 @@ def _get_upload_metadata(source_format, schema, dataset, table_id):
     :type schema: list
     :param schema: List of :class:`SchemaField` associated with a table.

-    :type dataset: :class:`~google.cloud.bigquery.dataset.Dataset`
-    :param dataset: A dataset which contains a table.
+    :type project: str
+    :param project: The project bound to the table.
+
+    :type dataset_id: str
+    :param dataset_id: The ID of the dataset containing the table.

     :type table_id: str
     :param table_id: The table_id of the table.
@@ -1375,8 +1392,8 @@ def _get_upload_metadata(source_format, schema, dataset, table_id):
     load_config = {
         'sourceFormat': source_format,
         'destinationTable': {
-            'projectId': dataset.project,
-            'datasetId': dataset.dataset_id,
+            'projectId': project,
+            'datasetId': dataset_id,
             'tableId': table_id,
         },
     }
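Two behaviors from this file are worth pinning down: `path` is now formatted directly from the stored IDs rather than delegating to the dataset's path, and `_get_upload_metadata` takes plain `project`/`dataset_id` strings. A sketch of both as hypothetical standalone functions with the same logic (the names are illustrative only):

    def table_path(project, dataset_id, table_id):
        # Same formatting as TableReference.path and Table.path above.
        return '/projects/%s/datasets/%s/tables/%s' % (
            project, dataset_id, table_id)

    def upload_metadata(source_format, project, dataset_id, table_id):
        # Mirrors _get_upload_metadata for the empty-schema case; the real
        # function also folds a non-empty schema into the load config.
        return {
            'configuration': {
                'load': {
                    'sourceFormat': source_format,
                    'destinationTable': {
                        'projectId': project,
                        'datasetId': dataset_id,
                        'tableId': table_id,
                    },
                },
            },
        }

    assert table_path('p', 'd', 't') == '/projects/p/datasets/d/tables/t'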
2 changes: 1 addition & 1 deletion bigquery/tests/system.py
@@ -568,7 +568,7 @@ def _load_table_for_extract_table(
         self.to_delete.insert(0, blob)

         dataset = retry_403(Config.CLIENT.create_dataset)(
-            Dataset(table.dataset.dataset_id))
+            Dataset(table.dataset_id))
         self.to_delete.append(dataset)
         table = dataset.table(table.table_id)
         self.to_delete.insert(0, table)
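The fix above works because `table.dataset_id` is now a plain string, so it can be handed straight to the Dataset constructor. A sketch, assuming (as the test does) that Dataset accepts the dataset ID as its first argument:

    from google.cloud.bigquery.dataset import Dataset

    def dataset_for(table):
        # Hypothetical helper: rebuilds a Dataset handle from any object
        # exposing a string dataset_id, with no table.dataset hop.
        return Dataset(table.dataset_id)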
9 changes: 6 additions & 3 deletions bigquery/tests/unit/test_dataset.py
@@ -104,7 +104,8 @@ def test_ctor_defaults(self):
     def test_table(self):
         dataset_ref = self._make_one('some-project-1', 'dataset_1')
         table_ref = dataset_ref.table('table_1')
-        self.assertIs(table_ref.dataset, dataset_ref)
+        self.assertEqual(table_ref.dataset_id, 'dataset_1')
+        self.assertEqual(table_ref.project, 'some-project-1')
         self.assertEqual(table_ref.table_id, 'table_1')


@@ -543,7 +544,8 @@ def test_table_wo_schema(self):
         table = dataset.table('table_id')
         self.assertIsInstance(table, Table)
         self.assertEqual(table.table_id, 'table_id')
-        self.assertIs(table._dataset, dataset)
+        self.assertEqual(table.dataset_id, self.DS_ID)
+        self.assertEqual(table.project, self.PROJECT)
         self.assertEqual(table.schema, [])

     def test_table_w_schema(self):
@@ -558,7 +560,8 @@ def test_table_w_schema(self):
         table = dataset.table('table_id', schema=[full_name, age])
         self.assertIsInstance(table, Table)
         self.assertEqual(table.table_id, 'table_id')
-        self.assertIs(table._dataset, dataset)
+        self.assertEqual(table.dataset_id, self.DS_ID)
+        self.assertEqual(table.project, self.PROJECT)
         self.assertEqual(table.schema, [full_name, age])


14 changes: 5 additions & 9 deletions bigquery/tests/unit/test_job.py
@@ -1220,8 +1220,8 @@ def _verifyResourceProperties(self, job, resource):
         self.assertEqual(job.destination_uris, config['destinationUris'])

         table_ref = config['sourceTable']
-        self.assertEqual(job.source.dataset.project, table_ref['projectId'])
-        self.assertEqual(job.source.dataset.dataset_id, table_ref['datasetId'])
+        self.assertEqual(job.source.project, table_ref['projectId'])
+        self.assertEqual(job.source.dataset_id, table_ref['datasetId'])
         self.assertEqual(job.source.table_id, table_ref['tableId'])

         if 'compression' in config:
@@ -1908,7 +1908,6 @@ def test_statement_type(self):
         self.assertEqual(job.statement_type, statement_type)

     def test_referenced_tables(self):
-        from google.cloud.bigquery.dataset import Dataset
         from google.cloud.bigquery.table import Table

         ref_tables_resource = [{
@@ -1942,24 +1941,21 @@ def test_referenced_tables(self):

         self.assertIsInstance(local1, Table)
         self.assertEqual(local1.table_id, 'local1')
-        self.assertIsInstance(local1._dataset, Dataset)
         self.assertEqual(local1.dataset_id, 'dataset')
         self.assertEqual(local1.project, self.PROJECT)
-        self.assertIs(local1._dataset._client, client)
+        self.assertIs(local1._client, client)

         self.assertIsInstance(local2, Table)
         self.assertEqual(local2.table_id, 'local2')
-        self.assertIsInstance(local2._dataset, Dataset)
         self.assertEqual(local2.dataset_id, 'dataset')
         self.assertEqual(local2.project, self.PROJECT)
-        self.assertIs(local2._dataset._client, client)
+        self.assertIs(local2._client, client)

         self.assertIsInstance(remote, Table)
         self.assertEqual(remote.table_id, 'other-table')
-        self.assertIsInstance(remote._dataset, Dataset)
         self.assertEqual(remote.dataset_id, 'other-dataset')
         self.assertEqual(remote.project, 'other-project-123')
-        self.assertIs(remote._dataset._client, client)
+        self.assertIs(remote._client, client)

     def test_undeclared_query_paramters(self):
         from google.cloud.bigquery._helpers import ArrayQueryParameter
22 changes: 13 additions & 9 deletions bigquery/tests/unit/test_table.py
@@ -55,7 +55,7 @@ def test_ctor_defaults(self):
         dataset_ref = DatasetReference('project_1', 'dataset_1')

         table_ref = self._make_one(dataset_ref, 'table_1')
-        self.assertIs(table_ref.dataset, dataset_ref)
+        self.assertEqual(table_ref.dataset_id, dataset_ref.dataset_id)
         self.assertEqual(table_ref.table_id, 'table_1')


@@ -173,7 +173,6 @@ def test_ctor(self):
         table = self._make_one(table_ref, client=client)

         self.assertEqual(table.table_id, self.TABLE_NAME)
-        self.assertIs(table._dataset, dataset)
         self.assertEqual(table.project, self.PROJECT)
         self.assertEqual(table.dataset_id, self.DS_ID)
         self.assertEqual(
@@ -1710,7 +1709,8 @@ def _initiate_resumable_upload_helper(self, num_retries=None):
         data = b'goodbye gudbi gootbee'
         stream = io.BytesIO(data)
         metadata = _get_upload_metadata(
-            'CSV', table._schema, table._dataset, table.table_id)
+            'CSV', table._schema, table.project,
+            table.dataset_id, table.table_id)
         upload, transport = table._initiate_resumable_upload(
             client, stream, metadata, num_retries)

@@ -1777,7 +1777,8 @@ def _do_multipart_upload_success_helper(
         data = b'Bzzzz-zap \x00\x01\xf4'
         stream = io.BytesIO(data)
         metadata = _get_upload_metadata(
-            'CSV', table._schema, table._dataset, table.table_id)
+            'CSV', table._schema, table.project,
+            table.dataset_id, table.table_id)
         size = len(data)
         response = table._do_multipart_upload(
             client, stream, metadata, size, num_retries)
@@ -1928,7 +1929,7 @@ def test_upload_file_resumable_metadata(self):
             'load': {
                 'sourceFormat': config_args['source_format'],
                 'destinationTable': {
-                    'projectId': table._dataset.project,
+                    'projectId': table.project,
                     'datasetId': table.dataset_id,
                     'tableId': table.table_id,
                 },
@@ -2254,18 +2255,20 @@ def test_w_subfields(self):
 class Test__get_upload_metadata(unittest.TestCase):

     @staticmethod
-    def _call_fut(source_format, schema, dataset, name):
+    def _call_fut(source_format, schema, project, dataset_id, name):
         from google.cloud.bigquery.table import _get_upload_metadata

-        return _get_upload_metadata(source_format, schema, dataset, name)
+        return _get_upload_metadata(
+            source_format, schema, project, dataset_id, name)

     def test_empty_schema(self):
         source_format = 'AVRO'
         dataset = mock.Mock(project='prediction',
                             spec=['dataset_id', 'project'])
         dataset.dataset_id = 'market'  # mock.Mock() treats `name` specially.
         table_name = 'chairs'
-        metadata = self._call_fut(source_format, [], dataset, table_name)
+        metadata = self._call_fut(source_format, [], dataset.project,
+                                  dataset.dataset_id, table_name)

         expected = {
             'configuration': {
@@ -2290,7 +2293,8 @@ def test_with_schema(self):
         dataset.dataset_id = 'movie'  # mock.Mock() treats `name` specially.
         table_name = 'teebull-neem'
         metadata = self._call_fut(
-            source_format, [full_name], dataset, table_name)
+            source_format, [full_name], dataset.project,
+            dataset.dataset_id, table_name)

         expected = {
             'configuration': {
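Putting the new five-argument helper together with the mock pattern above, here is a sketch of the empty-schema case reusing the literal values from test_empty_schema; the expected dict below follows the destinationTable shape visible in the diffs and is an assumption, not the verbatim test fixture:

    import mock  # the standalone mock package, as in these tests

    from google.cloud.bigquery.table import _get_upload_metadata

    dataset = mock.Mock(project='prediction', spec=['dataset_id', 'project'])
    dataset.dataset_id = 'market'  # mock.Mock() treats `name` specially.

    metadata = _get_upload_metadata(
        'AVRO', [], dataset.project, dataset.dataset_id, 'chairs')
    assert metadata == {
        'configuration': {
            'load': {
                'sourceFormat': 'AVRO',
                'destinationTable': {
                    'projectId': 'prediction',
                    'datasetId': 'market',
                    'tableId': 'chairs',
                },
            },
        },
    }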
