Skip to content

Commit bc520a8

Browse files
tseaver authored and tswast committed
Rename job classes (#3797)
* Rename class: 'jobs.LoadTableFromStorageJob' -> 'jobs.LoadJob'.
* Rename class: 'jobs.ExtractTableToStorageJob' -> 'jobs.ExtractJob'.
1 parent 16f6566 commit bc520a8

File tree

5 files changed

+46
-46
lines changed

5 files changed

+46
-46
lines changed

bigquery/google/cloud/bigquery/client.py

Lines changed: 12 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -19,8 +19,8 @@
1919
from google.cloud.bigquery._http import Connection
2020
from google.cloud.bigquery.dataset import Dataset
2121
from google.cloud.bigquery.job import CopyJob
22-
from google.cloud.bigquery.job import ExtractTableToStorageJob
23-
from google.cloud.bigquery.job import LoadTableFromStorageJob
22+
from google.cloud.bigquery.job import ExtractJob
23+
from google.cloud.bigquery.job import LoadJob
2424
from google.cloud.bigquery.job import QueryJob
2525
from google.cloud.bigquery.query import QueryResults
2626

@@ -204,20 +204,20 @@ def job_from_resource(self, resource):
204204
:param resource: one job resource from API response
205205
206206
:rtype: One of:
207-
:class:`google.cloud.bigquery.job.LoadTableFromStorageJob`,
207+
:class:`google.cloud.bigquery.job.LoadJob`,
208208
:class:`google.cloud.bigquery.job.CopyJob`,
209-
:class:`google.cloud.bigquery.job.ExtractTableToStorageJob`,
209+
:class:`google.cloud.bigquery.job.ExtractJob`,
210210
:class:`google.cloud.bigquery.job.QueryJob`,
211211
:class:`google.cloud.bigquery.job.RunSyncQueryJob`
212212
:returns: the job instance, constructed via the resource
213213
"""
214214
config = resource['configuration']
215215
if 'load' in config:
216-
return LoadTableFromStorageJob.from_api_repr(resource, self)
216+
return LoadJob.from_api_repr(resource, self)
217217
elif 'copy' in config:
218218
return CopyJob.from_api_repr(resource, self)
219219
elif 'extract' in config:
220-
return ExtractTableToStorageJob.from_api_repr(resource, self)
220+
return ExtractJob.from_api_repr(resource, self)
221221
elif 'query' in config:
222222
return QueryJob.from_api_repr(resource, self)
223223
raise ValueError('Cannot parse job resource')
@@ -288,11 +288,10 @@ def load_table_from_storage(self, job_name, destination, *source_uris):
288288
:param source_uris: URIs of data files to be loaded; in format
289289
``gs://<bucket_name>/<object_name_or_glob>``.
290290
291-
:rtype: :class:`google.cloud.bigquery.job.LoadTableFromStorageJob`
292-
:returns: a new ``LoadTableFromStorageJob`` instance
291+
:rtype: :class:`google.cloud.bigquery.job.LoadJob`
292+
:returns: a new ``LoadJob`` instance
293293
"""
294-
return LoadTableFromStorageJob(job_name, destination, source_uris,
295-
client=self)
294+
return LoadJob(job_name, destination, source_uris, client=self)
296295

297296
def copy_table(self, job_name, destination, *sources):
298297
"""Construct a job for copying one or more tables into another table.
@@ -331,11 +330,10 @@ def extract_table_to_storage(self, job_name, source, *destination_uris):
331330
table data is to be extracted; in format
332331
``gs://<bucket_name>/<object_name_or_glob>``.
333332
334-
:rtype: :class:`google.cloud.bigquery.job.ExtractTableToStorageJob`
335-
:returns: a new ``ExtractTableToStorageJob`` instance
333+
:rtype: :class:`google.cloud.bigquery.job.ExtractJob`
334+
:returns: a new ``ExtractJob`` instance
336335
"""
337-
return ExtractTableToStorageJob(job_name, source, destination_uris,
338-
client=self)
336+
return ExtractJob(job_name, source, destination_uris, client=self)
339337

340338
def run_async_query(self, job_name, query,
341339
udf_resources=(), query_parameters=()):

bigquery/google/cloud/bigquery/job.py

Lines changed: 11 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -525,8 +525,8 @@ class _LoadConfiguration(object):
525525
_write_disposition = None
526526

527527

528-
class LoadTableFromStorageJob(_AsyncJob):
529-
"""Asynchronous job for loading data into a table from CloudStorage.
528+
class LoadJob(_AsyncJob):
529+
"""Asynchronous job for loading data into a table from remote URI.
530530
531531
:type name: str
532532
:param name: the name of the job
@@ -535,8 +535,10 @@ class LoadTableFromStorageJob(_AsyncJob):
535535
:param destination: Table into which data is to be loaded.
536536
537537
:type source_uris: sequence of string
538-
:param source_uris: URIs of one or more data files to be loaded, in
539-
format ``gs://<bucket_name>/<object_name_or_glob>``.
538+
:param source_uris:
539+
URIs of one or more data files to be loaded. See
540+
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.sourceUris
541+
for supported URI formats.
540542
541543
:type client: :class:`google.cloud.bigquery.client.Client`
542544
:param client: A client which holds credentials and project configuration
@@ -550,7 +552,7 @@ class LoadTableFromStorageJob(_AsyncJob):
550552
_JOB_TYPE = 'load'
551553

552554
def __init__(self, name, destination, source_uris, client, schema=()):
553-
super(LoadTableFromStorageJob, self).__init__(name, client)
555+
super(LoadJob, self).__init__(name, client)
554556
self.destination = destination
555557
self.source_uris = source_uris
556558
self._configuration = _LoadConfiguration()
@@ -775,7 +777,7 @@ def from_api_repr(cls, resource, client):
775777
:param client: Client which holds credentials and project
776778
configuration for the dataset.
777779
778-
:rtype: :class:`google.cloud.bigquery.job.LoadTableFromStorageJob`
780+
:rtype: :class:`google.cloud.bigquery.job.LoadJob`
779781
:returns: Job parsed from ``resource``.
780782
"""
781783
name, config = cls._get_resource_config(resource)
@@ -919,7 +921,7 @@ class _ExtractConfiguration(object):
919921
_print_header = None
920922

921923

922-
class ExtractTableToStorageJob(_AsyncJob):
924+
class ExtractJob(_AsyncJob):
923925
"""Asynchronous job: extract data from a table into Cloud Storage.
924926
925927
:type name: str
@@ -940,7 +942,7 @@ class ExtractTableToStorageJob(_AsyncJob):
940942
_JOB_TYPE = 'extract'
941943

942944
def __init__(self, name, source, destination_uris, client):
943-
super(ExtractTableToStorageJob, self).__init__(name, client)
945+
super(ExtractJob, self).__init__(name, client)
944946
self.source = source
945947
self.destination_uris = destination_uris
946948
self._configuration = _ExtractConfiguration()
@@ -1018,7 +1020,7 @@ def from_api_repr(cls, resource, client):
10181020
:param client: Client which holds credentials and project
10191021
configuration for the dataset.
10201022
1021-
:rtype: :class:`google.cloud.bigquery.job.ExtractTableToStorageJob`
1023+
:rtype: :class:`google.cloud.bigquery.job.ExtractJob`
10221024
:returns: Job parsed from ``resource``.
10231025
"""
10241026
name, config = cls._get_resource_config(resource)

bigquery/google/cloud/bigquery/table.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1132,7 +1132,7 @@ def upload_from_file(self,
11321132
:type null_marker: str
11331133
:param null_marker: Optional. A custom null marker (example: "\\N")
11341134
1135-
:rtype: :class:`~google.cloud.bigquery.jobs.LoadTableFromStorageJob`
1135+
:rtype: :class:`~google.cloud.bigquery.jobs.LoadJob`
11361136
11371137
:returns: the job instance used to load the data (e.g., for
11381138
querying status). Note that the job is already started:

bigquery/tests/unit/test_client.py

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -268,9 +268,9 @@ def test_job_from_resource_unknown_type(self):
268268

269269
def test_list_jobs_defaults(self):
270270
import six
271-
from google.cloud.bigquery.job import LoadTableFromStorageJob
271+
from google.cloud.bigquery.job import LoadJob
272272
from google.cloud.bigquery.job import CopyJob
273-
from google.cloud.bigquery.job import ExtractTableToStorageJob
273+
from google.cloud.bigquery.job import ExtractJob
274274
from google.cloud.bigquery.job import QueryJob
275275

276276
PROJECT = 'PROJECT'
@@ -281,9 +281,9 @@ def test_list_jobs_defaults(self):
281281
SOURCE_URI = 'gs://test_bucket/src_object*'
282282
DESTINATION_URI = 'gs://test_bucket/dst_object*'
283283
JOB_TYPES = {
284-
'load_job': LoadTableFromStorageJob,
284+
'load_job': LoadJob,
285285
'copy_job': CopyJob,
286-
'extract_job': ExtractTableToStorageJob,
286+
'extract_job': ExtractJob,
287287
'query_job': QueryJob,
288288
}
289289
PATH = 'projects/%s/jobs' % PROJECT
@@ -400,13 +400,13 @@ def test_list_jobs_defaults(self):
400400

401401
def test_list_jobs_load_job_wo_sourceUris(self):
402402
import six
403-
from google.cloud.bigquery.job import LoadTableFromStorageJob
403+
from google.cloud.bigquery.job import LoadJob
404404

405405
PROJECT = 'PROJECT'
406406
DATASET = 'test_dataset'
407407
SOURCE_TABLE = 'source_table'
408408
JOB_TYPES = {
409-
'load_job': LoadTableFromStorageJob,
409+
'load_job': LoadJob,
410410
}
411411
PATH = 'projects/%s/jobs' % PROJECT
412412
TOKEN = 'TOKEN'
@@ -487,7 +487,7 @@ def test_list_jobs_explicit_missing(self):
487487
'stateFilter': 'done'})
488488

489489
def test_load_table_from_storage(self):
490-
from google.cloud.bigquery.job import LoadTableFromStorageJob
490+
from google.cloud.bigquery.job import LoadJob
491491

492492
PROJECT = 'PROJECT'
493493
JOB = 'job_name'
@@ -500,7 +500,7 @@ def test_load_table_from_storage(self):
500500
dataset = client.dataset(DATASET)
501501
destination = dataset.table(DESTINATION)
502502
job = client.load_table_from_storage(JOB, destination, SOURCE_URI)
503-
self.assertIsInstance(job, LoadTableFromStorageJob)
503+
self.assertIsInstance(job, LoadJob)
504504
self.assertIs(job._client, client)
505505
self.assertEqual(job.name, JOB)
506506
self.assertEqual(list(job.source_uris), [SOURCE_URI])
@@ -528,7 +528,7 @@ def test_copy_table(self):
528528
self.assertIs(job.destination, destination)
529529

530530
def test_extract_table_to_storage(self):
531-
from google.cloud.bigquery.job import ExtractTableToStorageJob
531+
from google.cloud.bigquery.job import ExtractJob
532532

533533
PROJECT = 'PROJECT'
534534
JOB = 'job_name'
@@ -541,7 +541,7 @@ def test_extract_table_to_storage(self):
541541
dataset = client.dataset(DATASET)
542542
source = dataset.table(SOURCE)
543543
job = client.extract_table_to_storage(JOB, source, DESTINATION)
544-
self.assertIsInstance(job, ExtractTableToStorageJob)
544+
self.assertIsInstance(job, ExtractJob)
545545
self.assertIs(job._client, client)
546546
self.assertEqual(job.name, JOB)
547547
self.assertEqual(job.source, source)

bigquery/tests/unit/test_job.py

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -143,24 +143,24 @@ def _verifyReadonlyResourceProperties(self, job, resource):
143143
self.assertIsNone(job.user_email)
144144

145145

146-
class TestLoadTableFromStorageJob(unittest.TestCase, _Base):
146+
class TestLoadJob(unittest.TestCase, _Base):
147147
JOB_TYPE = 'load'
148148

149149
@staticmethod
150150
def _get_target_class():
151-
from google.cloud.bigquery.job import LoadTableFromStorageJob
151+
from google.cloud.bigquery.job import LoadJob
152152

153-
return LoadTableFromStorageJob
153+
return LoadJob
154154

155155
def _setUpConstants(self):
156-
super(TestLoadTableFromStorageJob, self)._setUpConstants()
156+
super(TestLoadJob, self)._setUpConstants()
157157
self.INPUT_FILES = 2
158158
self.INPUT_BYTES = 12345
159159
self.OUTPUT_BYTES = 23456
160160
self.OUTPUT_ROWS = 345
161161

162162
def _makeResource(self, started=False, ended=False):
163-
resource = super(TestLoadTableFromStorageJob, self)._makeResource(
163+
resource = super(TestLoadJob, self)._makeResource(
164164
started, ended)
165165
config = resource['configuration']['load']
166166
config['sourceUris'] = [self.SOURCE1]
@@ -1142,19 +1142,19 @@ def test_reload_w_alternate_client(self):
11421142
self._verifyResourceProperties(job, RESOURCE)
11431143

11441144

1145-
class TestExtractTableToStorageJob(unittest.TestCase, _Base):
1145+
class TestExtractJob(unittest.TestCase, _Base):
11461146
JOB_TYPE = 'extract'
11471147
SOURCE_TABLE = 'source_table'
11481148
DESTINATION_URI = 'gs://bucket_name/object_name'
11491149

11501150
@staticmethod
11511151
def _get_target_class():
1152-
from google.cloud.bigquery.job import ExtractTableToStorageJob
1152+
from google.cloud.bigquery.job import ExtractJob
11531153

1154-
return ExtractTableToStorageJob
1154+
return ExtractJob
11551155

11561156
def _makeResource(self, started=False, ended=False):
1157-
resource = super(TestExtractTableToStorageJob, self)._makeResource(
1157+
resource = super(TestExtractJob, self)._makeResource(
11581158
started, ended)
11591159
config = resource['configuration']['extract']
11601160
config['sourceTable'] = {
@@ -2181,15 +2181,15 @@ def __init__(self, name=None):
21812181
def name(self):
21822182
if self._name is not None:
21832183
return self._name
2184-
return TestLoadTableFromStorageJob.TABLE_NAME
2184+
return TestLoadJob.TABLE_NAME
21852185

21862186
@property
21872187
def project(self):
2188-
return TestLoadTableFromStorageJob.PROJECT
2188+
return TestLoadJob.PROJECT
21892189

21902190
@property
21912191
def dataset_name(self):
2192-
return TestLoadTableFromStorageJob.DS_NAME
2192+
return TestLoadJob.DS_NAME
21932193

21942194

21952195
class _Connection(object):

0 commit comments

Comments (0)