Skip to content
2 changes: 2 additions & 0 deletions gcloud/bigquery/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,3 +24,5 @@
from gcloud.bigquery.client import Client
from gcloud.bigquery.connection import SCOPE
from gcloud.bigquery.dataset import Dataset
from gcloud.bigquery.table import SchemaField
from gcloud.bigquery.table import Table
43 changes: 43 additions & 0 deletions gcloud/bigquery/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,49 @@ class Client(JSONClient):

_connection_class = Connection

def list_datasets(self, include_all=False, max_results=None,
                  page_token=None):
    """List datasets for the project associated with this client.

    See:
    https://cloud.google.com/bigquery/docs/reference/v2/datasets/list

    :type include_all: boolean
    :param include_all: True if results include hidden datasets.

    :type max_results: int
    :param max_results: maximum number of datasets to return.  If not
                        passed, defaults to a value set by the API.

    :type page_token: string
    :param page_token: opaque marker for the next "page" of datasets. If
                       not passed, the API will return the first page of
                       datasets.

    :rtype: tuple, (list, str)
    :returns: list of :class:`gcloud.bigquery.dataset.Dataset`, plus a
              "next page token" string: if the token is not None,
              indicates that more datasets can be retrieved with another
              call (pass that value as ``page_token``).
    """
    params = {}

    if include_all:
        params['all'] = True

    if max_results is not None:
        params['maxResults'] = max_results

    if page_token is not None:
        params['pageToken'] = page_token

    path = '/projects/%s/datasets' % (self.project,)
    resp = self.connection.api_request(method='GET', path=path,
                                       query_params=params)
    # The API omits the 'datasets' key entirely when the project has no
    # datasets: use .get() so an empty project yields ([], None) rather
    # than raising KeyError.
    datasets = [Dataset.from_api_repr(resource, self)
                for resource in resp.get('datasets', ())]
    return datasets, resp.get('nextPageToken')

def dataset(self, name):
"""Construct a dataset bound to this client.

Expand Down
60 changes: 60 additions & 0 deletions gcloud/bigquery/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -190,6 +190,29 @@ def location(self, value):
raise ValueError("Pass a string, or None")
self._properties['location'] = value

@classmethod
def from_api_repr(cls, resource, client):
    """Factory: construct a dataset given its API representation

    :type resource: dict
    :param resource: dataset resource representation returned from the API

    :type client: :class:`gcloud.bigquery.client.Client`
    :param client: Client which holds credentials and project
                   configuration for the dataset.

    :rtype: :class:`gcloud.bigquery.dataset.Dataset`
    :returns: Dataset parsed from ``resource``.
    :raises: :class:`KeyError` if ``resource`` lacks the nested
             ``datasetReference`` / ``datasetId`` identity keys.
    """
    ref = resource.get('datasetReference')
    if ref is None or 'datasetId' not in ref:
        raise KeyError('Resource lacks required identity information:'
                       '["datasetReference"]["datasetId"]')
    dataset = cls(ref['datasetId'], client=client)
    dataset._set_properties(resource)
    return dataset

def _require_client(self, client):
"""Check client or verify over-ride.

Expand Down Expand Up @@ -357,6 +380,43 @@ def delete(self, client=None):
client = self._require_client(client)
client.connection.api_request(method='DELETE', path=self.path)

def list_tables(self, max_results=None, page_token=None):
    """List tables for the dataset.

    See:
    https://cloud.google.com/bigquery/docs/reference/v2/tables/list

    :type max_results: int
    :param max_results: maximum number of tables to return.  If not
                        passed, defaults to a value set by the API.

    :type page_token: string
    :param page_token: opaque marker for the next "page" of tables.  If
                       not passed, the API will return the first page of
                       tables.

    :rtype: tuple, (list, str)
    :returns: list of :class:`gcloud.bigquery.table.Table`, plus a
              "next page token" string: if not None, indicates that
              more tables can be retrieved with another call (pass that
              value as ``page_token``).
    """
    params = {}

    if max_results is not None:
        params['maxResults'] = max_results

    if page_token is not None:
        params['pageToken'] = page_token

    path = '/projects/%s/datasets/%s/tables' % (self.project, self.name)
    connection = self._client.connection
    resp = connection.api_request(method='GET', path=path,
                                  query_params=params)
    # The API omits the 'tables' key entirely when the dataset is empty:
    # use .get() so an empty dataset yields ([], None) rather than
    # raising KeyError.
    tables = [Table.from_api_repr(resource, self)
              for resource in resp.get('tables', ())]
    return tables, resp.get('nextPageToken')

def table(self, name, schema=()):
"""Construct a table bound to this dataset.

Expand Down
24 changes: 23 additions & 1 deletion gcloud/bigquery/table.py
Original file line number Diff line number Diff line change
Expand Up @@ -298,6 +298,28 @@ def view_query(self):
"""Delete SQL query defining the table as a view."""
self._properties.pop('view', None)

@classmethod
def from_api_repr(cls, resource, dataset):
    """Factory: construct a table given its API representation

    :type resource: dict
    :param resource: table resource representation returned from the API

    :type dataset: :class:`gcloud.bigquery.dataset.Dataset`
    :param dataset: The dataset containing the table.

    :rtype: :class:`gcloud.bigquery.table.Table`
    :returns: Table parsed from ``resource``.
    :raises: :class:`KeyError` if ``resource`` lacks the nested
             ``tableReference`` / ``tableId`` identity keys.
    """
    ref = resource.get('tableReference')
    if ref is None or 'tableId' not in ref:
        raise KeyError('Resource lacks required identity information:'
                       '["tableReference"]["tableId"]')
    instance = cls(ref['tableId'], dataset=dataset)
    instance._set_properties(resource)
    return instance

def _require_client(self, client):
"""Check client or verify over-ride.

Expand Down Expand Up @@ -344,7 +366,7 @@ def _set_properties(self, api_response):
"""
self._properties.clear()
cleaned = api_response.copy()
schema = cleaned.pop('schema', {})
schema = cleaned.pop('schema', {'fields': ()})
self.schema = self._parse_schema_resource(schema)
if 'creationTime' in cleaned:
cleaned['creationTime'] = float(cleaned['creationTime'])
Expand Down
94 changes: 94 additions & 0 deletions gcloud/bigquery/test_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,88 @@ def test_dataset(self):
self.assertEqual(dataset.name, DATASET)
self.assertTrue(dataset._client is client)

def test_list_datasets_defaults(self):
    from gcloud.bigquery.dataset import Dataset
    PROJECT = 'PROJECT'
    PATH = 'projects/%s/datasets' % PROJECT
    TOKEN = 'TOKEN'

    def _resource(dataset_id, friendly_name):
        # Minimal dataset resource as returned by the datasets.list API.
        return {'kind': 'bigquery#dataset',
                'id': '%s:%s' % (PROJECT, dataset_id),
                'datasetReference': {'datasetId': dataset_id,
                                     'projectId': PROJECT},
                'friendlyName': friendly_name}

    DATA = {
        'nextPageToken': TOKEN,
        'datasets': [_resource('dataset_one', None),
                     _resource('dataset_two', 'Two')],
    }
    creds = _Credentials()
    client = self._makeOne(PROJECT, creds)
    conn = client.connection = _Connection(DATA)

    datasets, token = client.list_datasets()

    # Each returned Dataset mirrors one resource, in order.
    self.assertEqual(len(datasets), len(DATA['datasets']))
    for found, expected in zip(datasets, DATA['datasets']):
        self.assertTrue(isinstance(found, Dataset))
        self.assertEqual(found.dataset_id, expected['id'])
        self.assertEqual(found.friendly_name, expected['friendlyName'])
    self.assertEqual(token, TOKEN)

    # Exactly one GET against the datasets collection.
    self.assertEqual(len(conn._requested), 1)
    req = conn._requested[0]
    self.assertEqual(req['method'], 'GET')
    self.assertEqual(req['path'], '/%s' % PATH)

def test_list_datasets_explicit(self):
    from gcloud.bigquery.dataset import Dataset
    PROJECT = 'PROJECT'
    PATH = 'projects/%s/datasets' % PROJECT
    TOKEN = 'TOKEN'

    def _resource(dataset_id, friendly_name):
        # Minimal dataset resource as returned by the datasets.list API.
        return {'kind': 'bigquery#dataset',
                'id': '%s:%s' % (PROJECT, dataset_id),
                'datasetReference': {'datasetId': dataset_id,
                                     'projectId': PROJECT},
                'friendlyName': friendly_name}

    # No 'nextPageToken' here: the returned token must be None.
    DATA = {
        'datasets': [_resource('dataset_one', None),
                     _resource('dataset_two', 'Two')],
    }
    creds = _Credentials()
    client = self._makeOne(PROJECT, creds)
    conn = client.connection = _Connection(DATA)

    datasets, token = client.list_datasets(
        include_all=True, max_results=3, page_token=TOKEN)

    self.assertEqual(len(datasets), len(DATA['datasets']))
    for found, expected in zip(datasets, DATA['datasets']):
        self.assertTrue(isinstance(found, Dataset))
        self.assertEqual(found.dataset_id, expected['id'])
        self.assertEqual(found.friendly_name, expected['friendlyName'])
    self.assertEqual(token, None)

    # All three options must be forwarded as query parameters.
    self.assertEqual(len(conn._requested), 1)
    req = conn._requested[0]
    self.assertEqual(req['method'], 'GET')
    self.assertEqual(req['path'], '/%s' % PATH)
    self.assertEqual(req['query_params'],
                     {'all': True, 'maxResults': 3, 'pageToken': TOKEN})


class _Credentials(object):

Expand All @@ -58,3 +140,15 @@ def create_scoped_required():
def create_scoped(self, scope):
    """Record the requested *scope* and return this credentials stub.

    Mirrors the oauth2 credentials interface so tests can verify which
    scopes the client requested.
    """
    self._scopes = scope
    return self


class _Connection(object):

def __init__(self, *responses):
self._responses = responses
self._requested = []

def api_request(self, **kw):
self._requested.append(kw)
response, self._responses = self._responses[0], self._responses[1:]
return response
Loading