
bigquery: remove unused function #4147

Merged
merged 1 commit on Oct 10, 2017
43 changes: 0 additions & 43 deletions bigquery/google/cloud/bigquery/table.py
@@ -831,46 +831,3 @@ def _build_schema_resource(fields):
            info['fields'] = _build_schema_resource(field.fields)
        infos.append(info)
    return infos
# pylint: enable=unused-argument


def _get_upload_metadata(source_format, schema, project, dataset_id, table_id):
    """Get base metadata for creating a table.

    :type source_format: str
    :param source_format: one of 'CSV' or 'NEWLINE_DELIMITED_JSON'
                          (the ``sourceFormat`` job configuration option).

    :type schema: list
    :param schema: List of :class:`SchemaField` associated with a table.

    :type project: str
    :param project: The project bound to the table.

    :type dataset_id: str
    :param dataset_id: The ID of the dataset containing the table.

    :type table_id: str
    :param table_id: The ID of the table.

    :rtype: dict
    :returns: The metadata dictionary.
    """
    load_config = {
        'sourceFormat': source_format,
        'destinationTable': {
            'projectId': project,
            'datasetId': dataset_id,
            'tableId': table_id,
        },
    }
    if schema:
        load_config['schema'] = {
            'fields': _build_schema_resource(schema),
        }

    return {
        'configuration': {
            'load': load_config,
        },
    }
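
For context, a minimal sketch of how the removed helper was exercised against the pre-removal module. The project, dataset, and table identifiers below are hypothetical; the expected dict mirrors the payloads asserted in the deleted tests further down.

from google.cloud.bigquery.table import SchemaField, _get_upload_metadata

# Hypothetical identifiers; any project/dataset/table strings behave the same.
schema = [SchemaField('full_name', 'STRING', mode='REQUIRED')]
metadata = _get_upload_metadata(
    'CSV', schema, 'my-project', 'my_dataset', 'my_table')

# The helper only wraps its arguments into a load-job resource fragment:
assert metadata == {
    'configuration': {
        'load': {
            'sourceFormat': 'CSV',
            'destinationTable': {
                'projectId': 'my-project',
                'datasetId': 'my_dataset',
                'tableId': 'my_table',
            },
            'schema': {
                'fields': [
                    {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'},
                ],
            },
        },
    },
}
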
68 changes: 0 additions & 68 deletions bigquery/tests/unit/test_table.py
@@ -1101,74 +1101,6 @@ def test_w_subfields(self):
'mode': 'REQUIRED'}]})


class Test__get_upload_metadata(unittest.TestCase):

    @staticmethod
    def _call_fut(source_format, schema, project, dataset_id, name):
        from google.cloud.bigquery.table import _get_upload_metadata

        return _get_upload_metadata(
            source_format, schema, project, dataset_id, name)

    def test_empty_schema(self):
        source_format = 'AVRO'
        dataset = mock.Mock(project='prediction',
                            spec=['dataset_id', 'project'])
        dataset.dataset_id = 'market'  # mock.Mock() treats `name` specially.
        table_name = 'chairs'
        metadata = self._call_fut(source_format, [], dataset.project,
                                  dataset.dataset_id, table_name)

        expected = {
            'configuration': {
                'load': {
                    'sourceFormat': source_format,
                    'destinationTable': {
                        'projectId': dataset.project,
                        'datasetId': dataset.dataset_id,
                        'tableId': table_name,
                    },
                },
            },
        }
        self.assertEqual(metadata, expected)

    def test_with_schema(self):
        from google.cloud.bigquery.table import SchemaField

        source_format = 'CSV'
        full_name = SchemaField('full_name', 'STRING', mode='REQUIRED')
        dataset = mock.Mock(project='blind', spec=['dataset_id', 'project'])
        dataset.dataset_id = 'movie'  # mock.Mock() treats `name` specially.
        table_name = 'teebull-neem'
        metadata = self._call_fut(
            source_format, [full_name], dataset.project,
            dataset.dataset_id, table_name)

        expected = {
            'configuration': {
                'load': {
                    'sourceFormat': source_format,
                    'destinationTable': {
                        'projectId': dataset.project,
                        'datasetId': dataset.dataset_id,
                        'tableId': table_name,
                    },
                    'schema': {
                        'fields': [
                            {
                                'name': full_name.name,
                                'type': full_name.field_type,
                                'mode': full_name.mode,
                            },
                        ],
                    },
                },
            },
        }
        self.assertEqual(metadata, expected)
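
For reference, a minimal sketch of the SchemaField serialization those expected payloads rely on, assuming the _build_schema_resource behavior shown in the table.py context above; the field names here are hypothetical.

from google.cloud.bigquery.table import SchemaField, _build_schema_resource

# Hypothetical fields; nested RECORD fields recurse through
# _build_schema_resource (see the info['fields'] context line above).
phone = SchemaField('phone', 'STRING', mode='REQUIRED')
contact = SchemaField('contact', 'RECORD', mode='REPEATED', fields=[phone])

assert _build_schema_resource([contact]) == [{
    'name': 'contact',
    'type': 'RECORD',
    'mode': 'REPEATED',
    'fields': [{'name': 'phone', 'type': 'STRING', 'mode': 'REQUIRED'}],
}]
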


class _Client(object):

    _query_results = ()