""" Google BigQuery support """
22
3+ from pandas .util .decorators import generate_dynamic_docstring
4+
35
46def _try_import ():
57 # since pandas is a dependency of pandas-gbq
68 # we need to import on first use
7-
89 try :
910 import pandas_gbq
1011 except ImportError :
@@ -20,82 +21,6 @@ def _try_import():
2021def read_gbq (query , project_id = None , index_col = None , col_order = None ,
2122 reauth = False , verbose = True , private_key = None , dialect = 'legacy' ,
2223 ** kwargs ):
23- r"""Load data from Google BigQuery.
24-
25- THIS IS AN EXPERIMENTAL LIBRARY
26-
27- The main method a user calls to execute a Query in Google BigQuery
28- and read results into a pandas DataFrame.
29-
30- Google BigQuery API Client Library v2 for Python is used.
31- Documentation is available at
32- https://developers.google.com/api-client-library/python/apis/bigquery/v2
33-
34- Authentication to the Google BigQuery service is via OAuth 2.0.
35-
36- - If "private_key" is not provided:
37-
38- By default "application default credentials" are used.
39-
40- .. versionadded:: 0.19.0
41-
42- If default application credentials are not found or are restrictive,
43- user account credentials are used. In this case, you will be asked to
44- grant permissions for product name 'pandas GBQ'.
45-
46- - If "private_key" is provided:
47-
48- Service account credentials will be used to authenticate.
49-
50- Parameters
51- ----------
52- query : str
53- SQL-Like Query to return data values
54- project_id : str
55- Google BigQuery Account project ID.
56- index_col : str (optional)
57- Name of result column to use for index in results DataFrame
58- col_order : list(str) (optional)
59- List of BigQuery column names in the desired order for results
60- DataFrame
61- reauth : boolean (default False)
62- Force Google BigQuery to reauthenticate the user. This is useful
63- if multiple accounts are used.
64- verbose : boolean (default True)
65- Verbose output
66- private_key : str (optional)
67- Service account private key in JSON format. Can be file path
68- or string contents. This is useful for remote server
69- authentication (eg. jupyter iPython notebook on remote host)
70-
71- .. versionadded:: 0.18.1
72-
73- dialect : {'legacy', 'standard'}, default 'legacy'
74- 'legacy' : Use BigQuery's legacy SQL dialect.
75- 'standard' : Use BigQuery's standard SQL (beta), which is
76- compliant with the SQL 2011 standard. For more information
77- see `BigQuery SQL Reference
78- <https://cloud.google.com/bigquery/sql-reference/>`__
79-
80- .. versionadded:: 0.19.0
81-
82- **kwargs : Arbitrary keyword arguments
83- configuration (dict): query config parameters for job processing.
84- For example:
85-
86- configuration = {'query': {'useQueryCache': False}}
87-
88- For more information see `BigQuery SQL Reference
89- <https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query>`
90-
91- .. versionadded:: 0.20.0
92-
93- Returns
94- -------
95- df: DataFrame
96- DataFrame representing results of query
97-
98- """
9924 pandas_gbq = _try_import ()
10025 return pandas_gbq .read_gbq (
10126 query , project_id = project_id ,
@@ -106,61 +31,18 @@ def read_gbq(query, project_id=None, index_col=None, col_order=None,
10631 ** kwargs )
10732
10833
109- def to_gbq (dataframe , destination_table , project_id , chunksize = 10000 ,
110- verbose = True , reauth = False , if_exists = 'fail' , private_key = None ):
111- """Write a DataFrame to a Google BigQuery table.
112-
113- THIS IS AN EXPERIMENTAL LIBRARY
114-
115- The main method a user calls to export pandas DataFrame contents to
116- Google BigQuery table.
117-
118- Google BigQuery API Client Library v2 for Python is used.
119- Documentation is available at
120- https://developers.google.com/api-client-library/python/apis/bigquery/v2
121-
122- Authentication to the Google BigQuery service is via OAuth 2.0.
# Mirror the docstring of pandas_gbq.read_gbq so the pandas wrapper stays
# in sync with the installed pandas-gbq version. The lambda presumably
# defers the pandas_gbq import until the docstring is actually built —
# NOTE(review): confirm against generate_dynamic_docstring's contract.
read_gbq.__doc__ = generate_dynamic_docstring(
    lambda: _try_import().read_gbq.__doc__)
12336
124- - If "private_key" is not provided:
12537
126- By default "application default credentials" are used.
127-
128- .. versionadded:: 0.19.0
129-
130- If default application credentials are not found or are restrictive,
131- user account credentials are used. In this case, you will be asked to
132- grant permissions for product name 'pandas GBQ'.
133-
134- - If "private_key" is provided:
135-
136- Service account credentials will be used to authenticate.
137-
138- Parameters
139- ----------
140- dataframe : DataFrame
141- DataFrame to be written
142- destination_table : string
143- Name of table to be written, in the form 'dataset.tablename'
144- project_id : str
145- Google BigQuery Account project ID.
146- chunksize : int (default 10000)
147- Number of rows to be inserted in each chunk from the dataframe.
148- verbose : boolean (default True)
149- Show percentage complete
150- reauth : boolean (default False)
151- Force Google BigQuery to reauthenticate the user. This is useful
152- if multiple accounts are used.
153- if_exists : {'fail', 'replace', 'append'}, default 'fail'
154- 'fail': If table exists, do nothing.
155- 'replace': If table exists, drop it, recreate it, and insert data.
156- 'append': If table exists, insert data. Create if does not exist.
157- private_key : str (optional)
158- Service account private key in JSON format. Can be file path
159- or string contents. This is useful for remote server
160- authentication (eg. jupyter iPython notebook on remote host)
161- """
def to_gbq(dataframe, destination_table, project_id, chunksize=10000,
           verbose=True, reauth=False, if_exists='fail', private_key=None):
    # Thin delegation layer: the real implementation lives in the optional
    # pandas-gbq package, imported lazily on first use via _try_import().
    # All arguments are forwarded unchanged; nothing is returned.
    gbq_module = _try_import()
    gbq_module.to_gbq(dataframe, destination_table, project_id,
                      chunksize=chunksize, verbose=verbose,
                      reauth=reauth, if_exists=if_exists,
                      private_key=private_key)
45+
46+
# Mirror the docstring of pandas_gbq.to_gbq so the pandas wrapper stays
# in sync with the installed pandas-gbq version. The lambda presumably
# defers the pandas_gbq import until the docstring is actually built —
# NOTE(review): confirm against generate_dynamic_docstring's contract.
to_gbq.__doc__ = generate_dynamic_docstring(
    lambda: _try_import().to_gbq.__doc__)