
Commit

Blacken libraries (googleapis#6794)
crwilcox authored and Erik Webb committed Dec 3, 2018
1 parent 76ac80c commit 096a09d
Showing 114 changed files with 25,126 additions and 22,467 deletions.
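The changes below are mechanical reformattings produced by the Black code formatter: string literals normalized to double quotes, long statements rewrapped to the line-length limit, blank lines inserted after `try:`-guarded imports, and trailing commas added to exploded literals. A minimal before/after sketch of these transformations (illustrative input, not drawn from any single file in the diff):

```python
# Before blackening (the style on the old side of this diff):
__all__ = [
    '__version__',
    'Client']

# After Black: double quotes, one element per line, and a trailing comma
# so that appending an entry later touches only one line.
__all__ = [
    "__version__",
    "Client",
]
```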
1,411 changes: 739 additions & 672 deletions bigquery/docs/snippets.py

Large diffs are not rendered by default.

2 changes: 2 additions & 0 deletions bigquery/google/__init__.py
@@ -14,7 +14,9 @@

 try:
     import pkg_resources
+
     pkg_resources.declare_namespace(__name__)
 except ImportError:
     import pkgutil
+
     __path__ = pkgutil.extend_path(__path__, __name__)
2 changes: 2 additions & 0 deletions bigquery/google/cloud/__init__.py
@@ -14,7 +14,9 @@

 try:
     import pkg_resources
+
     pkg_resources.declare_namespace(__name__)
 except ImportError:
     import pkgutil
+
     __path__ = pkgutil.extend_path(__path__, __name__)
88 changes: 45 additions & 43 deletions bigquery/google/cloud/bigquery/__init__.py
@@ -29,7 +29,8 @@


 from pkg_resources import get_distribution
-__version__ = get_distribution('google-cloud-bigquery').version
+
+__version__ = get_distribution("google-cloud-bigquery").version

 from google.cloud.bigquery.client import Client
 from google.cloud.bigquery.dataset import AccessEntry
@@ -73,52 +74,52 @@
 from google.cloud.bigquery.table import TimePartitioning

 __all__ = [
-    '__version__',
-    'Client',
+    "__version__",
+    "Client",
     # Queries
-    'QueryJob',
-    'QueryJobConfig',
-    'ArrayQueryParameter',
-    'ScalarQueryParameter',
-    'StructQueryParameter',
+    "QueryJob",
+    "QueryJobConfig",
+    "ArrayQueryParameter",
+    "ScalarQueryParameter",
+    "StructQueryParameter",
     # Datasets
-    'Dataset',
-    'DatasetReference',
-    'AccessEntry',
+    "Dataset",
+    "DatasetReference",
+    "AccessEntry",
     # Tables
-    'EncryptionConfiguration',
-    'Table',
-    'TableReference',
-    'Row',
-    'CopyJob',
-    'CopyJobConfig',
-    'ExtractJob',
-    'ExtractJobConfig',
-    'LoadJob',
-    'LoadJobConfig',
-    'UnknownJob',
-    'TimePartitioningType',
-    'TimePartitioning',
+    "EncryptionConfiguration",
+    "Table",
+    "TableReference",
+    "Row",
+    "CopyJob",
+    "CopyJobConfig",
+    "ExtractJob",
+    "ExtractJobConfig",
+    "LoadJob",
+    "LoadJobConfig",
+    "UnknownJob",
+    "TimePartitioningType",
+    "TimePartitioning",
     # Shared helpers
-    'SchemaField',
-    'UDFResource',
-    'ExternalConfig',
-    'BigtableOptions',
-    'BigtableColumnFamily',
-    'BigtableColumn',
-    'CSVOptions',
-    'GoogleSheetsOptions',
-    'DEFAULT_RETRY',
+    "SchemaField",
+    "UDFResource",
+    "ExternalConfig",
+    "BigtableOptions",
+    "BigtableColumnFamily",
+    "BigtableColumn",
+    "CSVOptions",
+    "GoogleSheetsOptions",
+    "DEFAULT_RETRY",
     # Enum Constants
-    'Compression',
-    'CreateDisposition',
-    'DestinationFormat',
-    'ExternalSourceFormat',
-    'Encoding',
-    'QueryPriority',
-    'SchemaUpdateOption',
-    'SourceFormat',
-    'WriteDisposition'
+    "Compression",
+    "CreateDisposition",
+    "DestinationFormat",
+    "ExternalSourceFormat",
+    "Encoding",
+    "QueryPriority",
+    "SchemaUpdateOption",
+    "SourceFormat",
+    "WriteDisposition",
 ]

@@ -127,4 +128,5 @@ def load_ipython_extension(ipython):
     from google.cloud.bigquery.magics import _cell_magic

     ipython.register_magic_function(
-        _cell_magic, magic_kind='cell', magic_name='bigquery')
+        _cell_magic, magic_kind="cell", magic_name="bigquery"
+    )
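For context, `load_ipython_extension` is the hook IPython calls when this package is loaded as an extension; the reformatted call above registers the `bigquery` cell magic. A usage sketch (assumes IPython, this library, and Google Cloud credentials are available):

```python
# In an IPython session or notebook cell:
%load_ext google.cloud.bigquery  # triggers load_ipython_extension()

# Then, in a separate cell, the registered cell magic runs a query
# (the destination variable name `results_df` is illustrative):
%%bigquery results_df
SELECT 1 AS x
```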
119 changes: 59 additions & 60 deletions bigquery/google/cloud/bigquery/_helpers.py
@@ -25,14 +25,14 @@
 from google.cloud._helpers import _RFC3339_NO_FRACTION
 from google.cloud._helpers import _to_bytes

-_RFC3339_MICROS_NO_ZULU = '%Y-%m-%dT%H:%M:%S.%f'
-_TIMEONLY_WO_MICROS = '%H:%M:%S'
-_TIMEONLY_W_MICROS = '%H:%M:%S.%f'
+_RFC3339_MICROS_NO_ZULU = "%Y-%m-%dT%H:%M:%S.%f"
+_TIMEONLY_WO_MICROS = "%H:%M:%S"
+_TIMEONLY_W_MICROS = "%H:%M:%S.%f"


 def _not_null(value, field):
     """Check whether 'value' should be coerced to 'field' type."""
-    return value is not None or field.mode != 'NULLABLE'
+    return value is not None or field.mode != "NULLABLE"


 def _int_from_json(value, field):
@@ -56,7 +56,7 @@ def _decimal_from_json(value, field):
 def _bool_from_json(value, field):
     """Coerce 'value' to a bool, if set or not nullable."""
     if _not_null(value, field):
-        return value.lower() in ['t', 'true', '1']
+        return value.lower() in ["t", "true", "1"]


 def _string_from_json(value, _):
@@ -93,19 +93,21 @@ def _timestamp_query_param_from_json(value, field):
         # Canonical formats for timestamps in BigQuery are flexible. See:
         # g.co/cloud/bigquery/docs/reference/standard-sql/data-types#timestamp-type
         # The separator between the date and time can be 'T' or ' '.
-        value = value.replace(' ', 'T', 1)
+        value = value.replace(" ", "T", 1)
         # The UTC timezone may be formatted as Z or +00:00.
-        value = value.replace('Z', '')
-        value = value.replace('+00:00', '')
+        value = value.replace("Z", "")
+        value = value.replace("+00:00", "")

-        if '.' in value:
+        if "." in value:
             # YYYY-MM-DDTHH:MM:SS.ffffff
-            return datetime.datetime.strptime(
-                value, _RFC3339_MICROS_NO_ZULU).replace(tzinfo=UTC)
+            return datetime.datetime.strptime(value, _RFC3339_MICROS_NO_ZULU).replace(
+                tzinfo=UTC
+            )
         else:
             # YYYY-MM-DDTHH:MM:SS
-            return datetime.datetime.strptime(
-                value, _RFC3339_NO_FRACTION).replace(tzinfo=UTC)
+            return datetime.datetime.strptime(value, _RFC3339_NO_FRACTION).replace(
+                tzinfo=UTC
+            )
     else:
         return None
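The branch above normalizes the several timestamp spellings BigQuery may emit before parsing. A standalone sketch of the same steps (a mirror of the helper's logic rather than an import of the private function):

```python
import datetime

# Unify the date/time separator, strip a Z or +00:00 UTC suffix, then parse
# with or without fractional seconds. The real helper also attaches UTC tzinfo.
value = "2018-12-03 10:00:00.123456+00:00"
value = value.replace(" ", "T", 1).replace("Z", "").replace("+00:00", "")
fmt = "%Y-%m-%dT%H:%M:%S.%f" if "." in value else "%Y-%m-%dT%H:%M:%S"
print(datetime.datetime.strptime(value, fmt))  # 2018-12-03 10:00:00.123456
```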

@@ -123,7 +125,7 @@ def _datetime_from_json(value, field):
         :data:`None`).
     """
     if _not_null(value, field):
-        if '.' in value:
+        if "." in value:
             # YYYY-MM-DDTHH:MM:SS.ffffff
             return datetime.datetime.strptime(value, _RFC3339_MICROS_NO_ZULU)
         else:
@@ -156,37 +158,37 @@ def _record_from_json(value, field):
     """Coerce 'value' to a mapping, if set or not nullable."""
     if _not_null(value, field):
         record = {}
-        record_iter = zip(field.fields, value['f'])
+        record_iter = zip(field.fields, value["f"])
         for subfield, cell in record_iter:
             converter = _CELLDATA_FROM_JSON[subfield.field_type]
-            if subfield.mode == 'REPEATED':
-                value = [converter(item['v'], subfield) for item in cell['v']]
+            if subfield.mode == "REPEATED":
+                value = [converter(item["v"], subfield) for item in cell["v"]]
             else:
-                value = converter(cell['v'], subfield)
+                value = converter(cell["v"], subfield)
             record[subfield.name] = value
         return record


 _CELLDATA_FROM_JSON = {
-    'INTEGER': _int_from_json,
-    'INT64': _int_from_json,
-    'FLOAT': _float_from_json,
-    'FLOAT64': _float_from_json,
-    'NUMERIC': _decimal_from_json,
-    'BOOLEAN': _bool_from_json,
-    'BOOL': _bool_from_json,
-    'STRING': _string_from_json,
-    'GEOGRAPHY': _string_from_json,
-    'BYTES': _bytes_from_json,
-    'TIMESTAMP': _timestamp_from_json,
-    'DATETIME': _datetime_from_json,
-    'DATE': _date_from_json,
-    'TIME': _time_from_json,
-    'RECORD': _record_from_json,
+    "INTEGER": _int_from_json,
+    "INT64": _int_from_json,
+    "FLOAT": _float_from_json,
+    "FLOAT64": _float_from_json,
+    "NUMERIC": _decimal_from_json,
+    "BOOLEAN": _bool_from_json,
+    "BOOL": _bool_from_json,
+    "STRING": _string_from_json,
+    "GEOGRAPHY": _string_from_json,
+    "BYTES": _bytes_from_json,
+    "TIMESTAMP": _timestamp_from_json,
+    "DATETIME": _datetime_from_json,
+    "DATE": _date_from_json,
+    "TIME": _time_from_json,
+    "RECORD": _record_from_json,
 }

 _QUERY_PARAMS_FROM_JSON = dict(_CELLDATA_FROM_JSON)
-_QUERY_PARAMS_FROM_JSON['TIMESTAMP'] = _timestamp_query_param_from_json
+_QUERY_PARAMS_FROM_JSON["TIMESTAMP"] = _timestamp_query_param_from_json


 def _field_to_index_mapping(schema):
@@ -210,13 +212,12 @@ def _row_tuple_from_json(row, schema):
     :returns: A tuple of data converted to native types.
     """
     row_data = []
-    for field, cell in zip(schema, row['f']):
+    for field, cell in zip(schema, row["f"]):
         converter = _CELLDATA_FROM_JSON[field.field_type]
-        if field.mode == 'REPEATED':
-            row_data.append([converter(item['v'], field)
-                             for item in cell['v']])
+        if field.mode == "REPEATED":
+            row_data.append([converter(item["v"], field) for item in cell["v"]])
         else:
-            row_data.append(converter(cell['v'], field))
+            row_data.append(converter(cell["v"], field))

     return tuple(row_data)
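These converters consume the REST API's row encoding, in which a row is a `{"f": [...]}` list of cells and every value sits under a `"v"` key, with REPEATED fields nesting a further list of `{"v": ...}` wrappers. A standalone sketch of that shape (hypothetical two-field schema, no private helpers):

```python
# REST-encoded row for a schema like (age INTEGER, tags REPEATED STRING):
row = {"f": [{"v": "42"}, {"v": [{"v": "a"}, {"v": "b"}]}]}

age = int(row["f"][0]["v"])                      # scalar cell   -> 42
tags = [item["v"] for item in row["f"][1]["v"]]  # repeated cell -> ["a", "b"]
print(age, tags)
```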

@@ -226,8 +227,7 @@ def _rows_from_json(values, schema):
     from google.cloud.bigquery import Row

     field_to_index = _field_to_index_mapping(schema)
-    return [Row(_row_tuple_from_json(r, schema), field_to_index)
-            for r in values]
+    return [Row(_row_tuple_from_json(r, schema), field_to_index) for r in values]


 def _int_to_json(value):
@@ -252,14 +252,14 @@ def _decimal_to_json(value):
 def _bool_to_json(value):
     """Coerce 'value' to an JSON-compatible representation."""
     if isinstance(value, bool):
-        value = 'true' if value else 'false'
+        value = "true" if value else "false"
     return value


 def _bytes_to_json(value):
     """Coerce 'value' to an JSON-compatible representation."""
     if isinstance(value, bytes):
-        value = base64.standard_b64encode(value).decode('ascii')
+        value = base64.standard_b64encode(value).decode("ascii")
     return value

@@ -272,8 +272,7 @@ def _timestamp_to_json_parameter(value):
         if value.tzinfo not in (None, UTC):
             # Convert to UTC and remove the time zone info.
             value = value.replace(tzinfo=None) - value.utcoffset()
-        value = '%s %s+00:00' % (
-            value.date().isoformat(), value.time().isoformat())
+        value = "%s %s+00:00" % (value.date().isoformat(), value.time().isoformat())
     return value

@@ -310,30 +309,30 @@ def _time_to_json(value):

 # Converters used for scalar values marshalled as row data.
 _SCALAR_VALUE_TO_JSON_ROW = {
-    'INTEGER': _int_to_json,
-    'INT64': _int_to_json,
-    'FLOAT': _float_to_json,
-    'FLOAT64': _float_to_json,
-    'NUMERIC': _decimal_to_json,
-    'BOOLEAN': _bool_to_json,
-    'BOOL': _bool_to_json,
-    'BYTES': _bytes_to_json,
-    'TIMESTAMP': _timestamp_to_json_row,
-    'DATETIME': _datetime_to_json,
-    'DATE': _date_to_json,
-    'TIME': _time_to_json,
+    "INTEGER": _int_to_json,
+    "INT64": _int_to_json,
+    "FLOAT": _float_to_json,
+    "FLOAT64": _float_to_json,
+    "NUMERIC": _decimal_to_json,
+    "BOOLEAN": _bool_to_json,
+    "BOOL": _bool_to_json,
+    "BYTES": _bytes_to_json,
+    "TIMESTAMP": _timestamp_to_json_row,
+    "DATETIME": _datetime_to_json,
+    "DATE": _date_to_json,
+    "TIME": _time_to_json,
 }


 # Converters used for scalar values marshalled as query parameters.
 _SCALAR_VALUE_TO_JSON_PARAM = _SCALAR_VALUE_TO_JSON_ROW.copy()
-_SCALAR_VALUE_TO_JSON_PARAM['TIMESTAMP'] = _timestamp_to_json_parameter
+_SCALAR_VALUE_TO_JSON_PARAM["TIMESTAMP"] = _timestamp_to_json_parameter


 def _snake_to_camel_case(value):
     """Convert snake case string to camel case."""
-    words = value.split('_')
-    return words[0] + ''.join(map(str.capitalize, words[1:]))
+    words = value.split("_")
+    return words[0] + "".join(map(str.capitalize, words[1:]))


 def _get_sub_prop(container, keys, default=None):
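`_snake_to_camel_case` is used when translating the library's snake_case property names into the camelCase keys of BigQuery's REST resources; a quick illustration of the reformatted logic (reproduced here so the snippet is self-contained):

```python
def snake_to_camel_case(value):
    # First word stays lowercase; each following word is capitalized.
    words = value.split("_")
    return words[0] + "".join(map(str.capitalize, words[1:]))

print(snake_to_camel_case("write_disposition"))  # writeDisposition
```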
10 changes: 4 additions & 6 deletions bigquery/google/cloud/bigquery/_http.py
@@ -29,15 +29,13 @@ class Connection(_http.JSONConnection):
     :param client: The client that owns the current connection.
     """

-    API_BASE_URL = 'https://www.googleapis.com'
+    API_BASE_URL = "https://www.googleapis.com"
     """The base of the API call URL."""

-    API_VERSION = 'v2'
+    API_VERSION = "v2"
     """The version of the API, used in building the API call's URL."""

-    API_URL_TEMPLATE = '{api_base_url}/bigquery/{api_version}{path}'
+    API_URL_TEMPLATE = "{api_base_url}/bigquery/{api_version}{path}"
     """A template for the URL of a particular API call."""

-    _EXTRA_HEADERS = {
-        _http.CLIENT_INFO_HEADER: _CLIENT_INFO,
-    }
+    _EXTRA_HEADERS = {_http.CLIENT_INFO_HEADER: _CLIENT_INFO}
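The three class attributes above combine through `API_URL_TEMPLATE` to form request URLs; a sketch of the resulting shape (the project and path are hypothetical):

```python
API_BASE_URL = "https://www.googleapis.com"
API_VERSION = "v2"
API_URL_TEMPLATE = "{api_base_url}/bigquery/{api_version}{path}"

url = API_URL_TEMPLATE.format(
    api_base_url=API_BASE_URL,
    api_version=API_VERSION,
    path="/projects/my-project/jobs",  # hypothetical jobs path
)
print(url)  # https://www.googleapis.com/bigquery/v2/projects/my-project/jobs
```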
