|
18 | 18 |
|
19 | 19 | from gcloud.exceptions import NotFound
|
20 | 20 | from gcloud._helpers import _datetime_from_microseconds
|
| 21 | +from gcloud.bigquery.dataset import Dataset |
21 | 22 | from gcloud.bigquery.table import SchemaField
|
| 23 | +from gcloud.bigquery.table import Table |
22 | 24 | from gcloud.bigquery.table import _build_schema_resource
|
23 | 25 | from gcloud.bigquery.table import _parse_schema_resource
|
24 | 26 |
|
@@ -122,6 +124,13 @@ class Encoding(_EnumProperty):
|
122 | 124 | ALLOWED = (UTF_8, ISO_8559_1)
|
123 | 125 |
|
124 | 126 |
|
class QueryPriority(_EnumProperty):
    """Allowed values for the ``priority`` of a query job.

    Used by the ``RunQueryJob.priority`` property.
    """
    INTERACTIVE = 'INTERACTIVE'
    BATCH = 'BATCH'
    ALLOWED = INTERACTIVE, BATCH


125 | 134 | class SourceFormat(_EnumProperty):
|
126 | 135 | """Pseudo-enum for ``source_format`` properties."""
|
127 | 136 | CSV = 'CSV'
|
@@ -403,7 +412,7 @@ class _LoadConfiguration(object):
|
403 | 412 |
|
404 | 413 |
|
405 | 414 | class LoadTableFromStorageJob(_BaseJob):
|
406 |
| - """Asynchronous job for loading data into a BQ table from CloudStorage. |
| 415 | + """Asynchronous job for loading data into a table from CloudStorage. |
407 | 416 |
|
408 | 417 | :type name: string
|
409 | 418 | :param name: the name of the job
|
@@ -616,7 +625,7 @@ class _CopyConfiguration(object):
|
616 | 625 |
|
617 | 626 |
|
618 | 627 | class CopyJob(_BaseJob):
|
619 |
| - """Asynchronous job: copy data into a BQ table from other tables. |
| 628 | + """Asynchronous job: copy data into a table from other tables. |
620 | 629 |
|
621 | 630 | :type name: string
|
622 | 631 | :param name: the name of the job
|
@@ -695,7 +704,7 @@ class _ExtractConfiguration(object):
|
695 | 704 |
|
696 | 705 |
|
697 | 706 | class ExtractTableToStorageJob(_BaseJob):
|
698 |
| - """Asynchronous job: extract data from a BQ table into Cloud Storage. |
| 707 | + """Asynchronous job: extract data from a table into Cloud Storage. |
699 | 708 |
|
700 | 709 | :type name: string
|
701 | 710 | :param name: the name of the job
|
@@ -773,3 +782,140 @@ def _build_resource(self):
|
773 | 782 | self._populate_config_resource(configuration)
|
774 | 783 |
|
775 | 784 | return resource
|
| 785 | + |
| 786 | + |
| 787 | +class _QueryConfiguration(object): |
| 788 | + """User-settable configuration options for query jobs.""" |
| 789 | + # None -> use server default. |
| 790 | + _allow_large_results = None |
| 791 | + _create_disposition = None |
| 792 | + _default_dataset = None |
| 793 | + _destination_table = None |
| 794 | + _flatten_results = None |
| 795 | + _priority = None |
| 796 | + _use_query_cache = None |
| 797 | + _write_disposition = None |
| 798 | + |
| 799 | + |
class RunQueryJob(_BaseJob):
    """Asynchronous job: query tables.

    :type name: string
    :param name: the name of the job

    :type query: string
    :param query: SQL query string

    :type client: :class:`gcloud.bigquery.client.Client`
    :param client: A client which holds credentials and project configuration
                   for the dataset (which requires a project).
    """
    def __init__(self, name, query, client):
        super(RunQueryJob, self).__init__(name, client)
        self.query = query
        self._configuration = _QueryConfiguration()

    allow_large_results = _TypedProperty('allow_large_results', bool)
    """See:
    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query.allowLargeResults
    """

    create_disposition = CreateDisposition('create_disposition')
    """See:
    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query.createDisposition
    """

    default_dataset = _TypedProperty('default_dataset', Dataset)
    """See:
    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query.defaultDataset
    """

    destination_table = _TypedProperty('destination_table', Table)
    """See:
    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query.destinationTable
    """

    flatten_results = _TypedProperty('flatten_results', bool)
    """See:
    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query.flattenResults
    """

    priority = QueryPriority('priority')
    """See:
    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query.priority
    """

    use_query_cache = _TypedProperty('use_query_cache', bool)
    """See:
    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query.useQueryCache
    """

    write_disposition = WriteDisposition('write_disposition')
    """See:
    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query.writeDisposition
    """

    def _destination_table_resource(self):
        """Build the API-resource mapping for ``destination_table``.

        Returns ``None`` (implicitly) when no destination table is set.
        """
        if self.destination_table is not None:
            return {
                'projectId': self.destination_table.project,
                'datasetId': self.destination_table.dataset_name,
                'tableId': self.destination_table.name,
            }

    def _populate_config_resource(self, configuration):
        """Helper for _build_resource: copy config properties to resource"""
        # Only explicitly-set options are sent; ``None`` defers to the
        # server-side default for each field.
        if self.allow_large_results is not None:
            configuration['allowLargeResults'] = self.allow_large_results
        if self.create_disposition is not None:
            configuration['createDisposition'] = self.create_disposition
        if self.default_dataset is not None:
            configuration['defaultDataset'] = {
                'projectId': self.default_dataset.project,
                'datasetId': self.default_dataset.name,
            }
        if self.destination_table is not None:
            table_res = self._destination_table_resource()
            configuration['destinationTable'] = table_res
        if self.flatten_results is not None:
            configuration['flattenResults'] = self.flatten_results
        if self.priority is not None:
            configuration['priority'] = self.priority
        if self.use_query_cache is not None:
            configuration['useQueryCache'] = self.use_query_cache
        if self.write_disposition is not None:
            configuration['writeDisposition'] = self.write_disposition

    def _build_resource(self):
        """Generate a resource for :meth:`begin`."""

        resource = {
            'jobReference': {
                'projectId': self.project,
                'jobId': self.name,
            },
            'configuration': {
                'query': {
                    'query': self.query,
                },
            },
        }
        configuration = resource['configuration']['query']
        self._populate_config_resource(configuration)

        return resource

    def _scrub_local_properties(self, cleaned):
        """Helper: handle subclass properties in cleaned.

        Syncs the local ``destination_table`` with the copy reported by the
        server in ``cleaned``.

        :raises: :class:`ValueError` if the server reports a destination
                 table belonging to a different project than this job's.
        """
        configuration = cleaned['configuration']['query']
        dest_remote = configuration.get('destinationTable')

        if dest_remote is None:
            # Server no longer reports a destination: drop the local value.
            if self.destination_table is not None:
                del self.destination_table
        else:
            dest_local = self._destination_table_resource()
            if dest_remote != dest_local:
                # Explicit check rather than ``assert``: assertions are
                # stripped when running under ``python -O``.
                if dest_remote['projectId'] != self.project:
                    raise ValueError(
                        'Destination table project mismatch: %s' % (
                            dest_remote['projectId'],))
                dataset = self._client.dataset(dest_remote['datasetId'])
                self.destination_table = dataset.table(dest_remote['tableId'])
0 commit comments