Skip to content

Commit 112b441

Browse files
jbatswast
authored and committed
bigquery: remove unused function (#4147)
1 parent d1b564d commit 112b441

File tree

2 files changed

+0
-111
lines changed

2 files changed

+0
-111
lines changed

bigquery/google/cloud/bigquery/table.py

Lines changed: 0 additions & 43 deletions
Original file line numberDiff line numberDiff line change
@@ -831,46 +831,3 @@ def _build_schema_resource(fields):
831831
info['fields'] = _build_schema_resource(field.fields)
832832
infos.append(info)
833833
return infos
834-
# pylint: enable=unused-argument
835-
836-
837-
def _get_upload_metadata(source_format, schema, project, dataset_id, table_id):
838-
"""Get base metadata for creating a table.
839-
840-
:type source_format: str
841-
:param source_format: one of 'CSV' or 'NEWLINE_DELIMITED_JSON'.
842-
job configuration option.
843-
844-
:type schema: list
845-
:param schema: List of :class:`SchemaField` associated with a table.
846-
847-
:type project: str
848-
:param table_id: The project bound to the table.
849-
850-
:type dataset_id: str
851-
:param table_id: The dataset_id of the dataset.
852-
853-
:type table_id: str
854-
:param table_id: The table_id of the table.
855-
856-
:rtype: dict
857-
:returns: The metadata dictionary.
858-
"""
859-
load_config = {
860-
'sourceFormat': source_format,
861-
'destinationTable': {
862-
'projectId': project,
863-
'datasetId': dataset_id,
864-
'tableId': table_id,
865-
},
866-
}
867-
if schema:
868-
load_config['schema'] = {
869-
'fields': _build_schema_resource(schema),
870-
}
871-
872-
return {
873-
'configuration': {
874-
'load': load_config,
875-
},
876-
}

bigquery/tests/unit/test_table.py

Lines changed: 0 additions & 68 deletions
Original file line numberDiff line numberDiff line change
@@ -1101,74 +1101,6 @@ def test_w_subfields(self):
11011101
'mode': 'REQUIRED'}]})
11021102

11031103

1104-
class Test__get_upload_metadata(unittest.TestCase):
1105-
1106-
@staticmethod
1107-
def _call_fut(source_format, schema, project, dataset_id, name):
1108-
from google.cloud.bigquery.table import _get_upload_metadata
1109-
1110-
return _get_upload_metadata(
1111-
source_format, schema, project, dataset_id, name)
1112-
1113-
def test_empty_schema(self):
1114-
source_format = 'AVRO'
1115-
dataset = mock.Mock(project='prediction',
1116-
spec=['dataset_id', 'project'])
1117-
dataset.dataset_id = 'market' # mock.Mock() treats `name` specially.
1118-
table_name = 'chairs'
1119-
metadata = self._call_fut(source_format, [], dataset.project,
1120-
dataset.dataset_id, table_name)
1121-
1122-
expected = {
1123-
'configuration': {
1124-
'load': {
1125-
'sourceFormat': source_format,
1126-
'destinationTable': {
1127-
'projectId': dataset.project,
1128-
'datasetId': dataset.dataset_id,
1129-
'tableId': table_name,
1130-
},
1131-
},
1132-
},
1133-
}
1134-
self.assertEqual(metadata, expected)
1135-
1136-
def test_with_schema(self):
1137-
from google.cloud.bigquery.table import SchemaField
1138-
1139-
source_format = 'CSV'
1140-
full_name = SchemaField('full_name', 'STRING', mode='REQUIRED')
1141-
dataset = mock.Mock(project='blind', spec=['dataset_id', 'project'])
1142-
dataset.dataset_id = 'movie' # mock.Mock() treats `name` specially.
1143-
table_name = 'teebull-neem'
1144-
metadata = self._call_fut(
1145-
source_format, [full_name], dataset.project,
1146-
dataset.dataset_id, table_name)
1147-
1148-
expected = {
1149-
'configuration': {
1150-
'load': {
1151-
'sourceFormat': source_format,
1152-
'destinationTable': {
1153-
'projectId': dataset.project,
1154-
'datasetId': dataset.dataset_id,
1155-
'tableId': table_name,
1156-
},
1157-
'schema': {
1158-
'fields': [
1159-
{
1160-
'name': full_name.name,
1161-
'type': full_name.field_type,
1162-
'mode': full_name.mode,
1163-
},
1164-
],
1165-
},
1166-
},
1167-
},
1168-
}
1169-
self.assertEqual(metadata, expected)
1170-
1171-
11721104
class _Client(object):
11731105

11741106
_query_results = ()

0 commit comments

Comments
 (0)