Skip to content

Commit 01fc4f4

Browse files
authored
bigquery: modify LoadJob (#4103)
This PR handles loading from GCS. Loading from a local file will be done separately.
1 parent e028c38 commit 01fc4f4

File tree

6 files changed

+376
-370
lines changed

6 files changed

+376
-370
lines changed

bigquery/google/cloud/bigquery/__init__.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@
3535
from google.cloud.bigquery.job import CopyJobConfig
3636
from google.cloud.bigquery.job import ExtractJobConfig
3737
from google.cloud.bigquery.job import QueryJobConfig
38+
from google.cloud.bigquery.job import LoadJobConfig
3839
from google.cloud.bigquery.schema import SchemaField
3940
from google.cloud.bigquery.table import Table
4041

@@ -47,6 +48,7 @@
4748
'CopyJobConfig',
4849
'ExtractJobConfig',
4950
'QueryJobConfig',
51+
'LoadJobConfig',
5052
'ScalarQueryParameter',
5153
'SchemaField',
5254
'StructQueryParameter',

bigquery/google/cloud/bigquery/client.py

Lines changed: 22 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,8 @@
1919
import collections
2020
import uuid
2121

22+
import six
23+
2224
from google.api.core import page_iterator
2325
from google.cloud.client import ClientWithProject
2426
from google.cloud.bigquery._http import Connection
@@ -490,26 +492,37 @@ def list_jobs(self, max_results=None, page_token=None, all_users=None,
490492
max_results=max_results,
491493
extra_params=extra_params)
492494

493-
def load_table_from_storage(self, job_id, destination, *source_uris):
494-
"""Construct a job for loading data into a table from Cloud Storage.
495+
def load_table_from_storage(self, source_uris, destination,
496+
job_id=None, job_config=None):
497+
"""Starts a job for loading data into a table from Cloud Storage.
495498
496499
See
497500
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load
498501
499-
:type job_id: str
500-
:param job_id: Name of the job.
502+
:type source_uris: One of:
503+
str
504+
sequence of string
505+
:param source_uris: URIs of data files to be loaded; in format
506+
``gs://<bucket_name>/<object_name_or_glob>``.
501507
502-
:type destination: :class:`google.cloud.bigquery.table.Table`
508+
:type destination: :class:`google.cloud.bigquery.table.TableReference`
503509
:param destination: Table into which data is to be loaded.
504510
505-
:type source_uris: sequence of string
506-
:param source_uris: URIs of data files to be loaded; in format
507-
``gs://<bucket_name>/<object_name_or_glob>``.
511+
:type job_id: str
512+
:param job_id: Name of the job.
513+
514+
:type job_config: :class:`google.cloud.bigquery.job.LoadJobConfig`
515+
:param job_config: (Optional) Extra configuration options for the job.
508516
509517
:rtype: :class:`google.cloud.bigquery.job.LoadJob`
510518
:returns: a new ``LoadJob`` instance
511519
"""
512-
return LoadJob(job_id, destination, source_uris, client=self)
520+
job_id = _make_job_id(job_id)
521+
if isinstance(source_uris, six.string_types):
522+
source_uris = [source_uris]
523+
job = LoadJob(job_id, source_uris, destination, self, job_config)
524+
job.begin()
525+
return job
513526

514527
def copy_table(self, sources, destination, job_id=None, job_config=None):
515528
"""Start a job for copying one or more tables into another table.

0 commit comments

Comments
 (0)