|
19 | 19 | from google.cloud.bigquery._http import Connection |
20 | 20 | from google.cloud.bigquery.dataset import Dataset |
21 | 21 | from google.cloud.bigquery.job import CopyJob |
22 | | -from google.cloud.bigquery.job import ExtractTableToStorageJob |
23 | | -from google.cloud.bigquery.job import LoadTableFromStorageJob |
| 22 | +from google.cloud.bigquery.job import ExtractJob |
| 23 | +from google.cloud.bigquery.job import LoadJob |
24 | 24 | from google.cloud.bigquery.job import QueryJob |
25 | 25 | from google.cloud.bigquery.query import QueryResults |
26 | 26 |
|
@@ -204,20 +204,20 @@ def job_from_resource(self, resource): |
204 | 204 | :param resource: one job resource from API response |
205 | 205 |
|
206 | 206 | :rtype: One of: |
207 | | - :class:`google.cloud.bigquery.job.LoadTableFromStorageJob`, |
| 207 | + :class:`google.cloud.bigquery.job.LoadJob`, |
208 | 208 | :class:`google.cloud.bigquery.job.CopyJob`, |
209 | | - :class:`google.cloud.bigquery.job.ExtractTableToStorageJob`, |
| 209 | + :class:`google.cloud.bigquery.job.ExtractJob`, |
210 | 210 |         :class:`google.cloud.bigquery.job.QueryJob` |
211 | 211 |     :raises ValueError: if ``resource`` has no recognized job type |
212 | 212 | :returns: the job instance, constructed via the resource |
213 | 213 | """ |
214 | 214 | config = resource['configuration'] |
215 | 215 | if 'load' in config: |
216 | | - return LoadTableFromStorageJob.from_api_repr(resource, self) |
| 216 | + return LoadJob.from_api_repr(resource, self) |
217 | 217 | elif 'copy' in config: |
218 | 218 | return CopyJob.from_api_repr(resource, self) |
219 | 219 | elif 'extract' in config: |
220 | | - return ExtractTableToStorageJob.from_api_repr(resource, self) |
| 220 | + return ExtractJob.from_api_repr(resource, self) |
221 | 221 | elif 'query' in config: |
222 | 222 | return QueryJob.from_api_repr(resource, self) |
223 | 223 | raise ValueError('Cannot parse job resource') |
@@ -288,11 +288,10 @@ def load_table_from_storage(self, job_name, destination, *source_uris): |
288 | 288 | :param source_uris: URIs of data files to be loaded; in format |
289 | 289 | ``gs://<bucket_name>/<object_name_or_glob>``. |
290 | 290 |
|
291 | | - :rtype: :class:`google.cloud.bigquery.job.LoadTableFromStorageJob` |
292 | | - :returns: a new ``LoadTableFromStorageJob`` instance |
| 291 | + :rtype: :class:`google.cloud.bigquery.job.LoadJob` |
| 292 | + :returns: a new ``LoadJob`` instance |
293 | 293 | """ |
294 | | - return LoadTableFromStorageJob(job_name, destination, source_uris, |
295 | | - client=self) |
| 294 | + return LoadJob(job_name, destination, source_uris, client=self) |
296 | 295 |
|
297 | 296 | def copy_table(self, job_name, destination, *sources): |
298 | 297 | """Construct a job for copying one or more tables into another table. |
@@ -331,11 +330,10 @@ def extract_table_to_storage(self, job_name, source, *destination_uris): |
331 | 330 | table data is to be extracted; in format |
332 | 331 | ``gs://<bucket_name>/<object_name_or_glob>``. |
333 | 332 |
|
334 | | - :rtype: :class:`google.cloud.bigquery.job.ExtractTableToStorageJob` |
335 | | - :returns: a new ``ExtractTableToStorageJob`` instance |
| 333 | + :rtype: :class:`google.cloud.bigquery.job.ExtractJob` |
| 334 | + :returns: a new ``ExtractJob`` instance |
336 | 335 | """ |
337 | | - return ExtractTableToStorageJob(job_name, source, destination_uris, |
338 | | - client=self) |
| 336 | + return ExtractJob(job_name, source, destination_uris, client=self) |
339 | 337 |
|
340 | 338 | def run_async_query(self, job_name, query, |
341 | 339 | udf_resources=(), query_parameters=()): |
|
0 commit comments