Import bqstorage dependency at top of module.
tswast committed Apr 17, 2019
1 parent 1099952 commit 9016469
Showing 1 changed file with 14 additions and 3 deletions.
17 changes: 14 additions & 3 deletions bigquery/google/cloud/bigquery/table.py
@@ -17,6 +17,7 @@
 from __future__ import absolute_import
 
 import collections
+import concurrent.futures
 import copy
 import datetime
 import json
@@ -25,6 +26,11 @@
 
 import six
 
+try:
+    from google.cloud import bigquery_storage_v1beta1
+except ImportError:  # pragma: NO COVER
+    bigquery_storage_v1beta1 = None
+
 try:
     import pandas
 except ImportError:  # pragma: NO COVER
@@ -46,6 +52,10 @@
 from google.cloud.bigquery.external_config import ExternalConfig
 
 
+_NO_BQSTORAGE_ERROR = (
+    "The google-cloud-bigquery-storage library is not installed, "
+    "please install google-cloud-bigquery-storage to use bqstorage features."
+)
 _NO_PANDAS_ERROR = (
     "The pandas library is not installed, please install "
     "pandas to use the to_dataframe() function."
@@ -287,7 +297,8 @@ def to_bqstorage(self):
             google.cloud.bigquery_storage_v1beta1.types.TableReference:
                 A reference to this table in the BigQuery Storage API.
         """
-        from google.cloud import bigquery_storage_v1beta1
+        if bigquery_storage_v1beta1 is None:
+            raise ValueError(_NO_BQSTORAGE_ERROR)
 
         table_ref = bigquery_storage_v1beta1.types.TableReference()
         table_ref.project_id = self._project
@@ -1391,8 +1402,8 @@ def _to_dataframe_bqstorage_stream(
 
     def _to_dataframe_bqstorage(self, bqstorage_client, dtypes):
         """Use (faster, but billable) BQ Storage API to construct DataFrame."""
-        import concurrent.futures
-        from google.cloud import bigquery_storage_v1beta1
+        if bigquery_storage_v1beta1 is None:
+            raise ValueError(_NO_BQSTORAGE_ERROR)
 
         if "$" in self._table.table_id:
             raise ValueError(

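The change above applies a common optional-dependency pattern: attempt the import once at module scope, fall back to None, and raise a descriptive error only when a bqstorage-specific code path is actually invoked. A minimal sketch of that pattern, using the hypothetical names some_optional_dep and use_optional_feature (not part of google-cloud-bigquery), might look like this:

    # Try the optional import once, at module import time.
    try:
        import some_optional_dep
    except ImportError:
        some_optional_dep = None

    _NO_DEP_ERROR = (
        "The some-optional-dep library is not installed, "
        "please install some-optional-dep to use this feature."
    )


    def use_optional_feature():
        """Fail at call time with a clear message, not at import time."""
        if some_optional_dep is None:
            raise ValueError(_NO_DEP_ERROR)
        # Only reached when the optional library is available.
        return some_optional_dep.do_something()

With this arrangement, importing the module never fails when the optional library is absent; only callers of the guarded function see the ValueError, which mirrors how to_bqstorage() and _to_dataframe_bqstorage() behave after this commit.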