From f24d507ab7a23e46098f777147f286a949c0ed98 Mon Sep 17 00:00:00 2001 From: hkdevandla <60490673+hkdevandla@users.noreply.github.com> Date: Thu, 20 Aug 2020 10:28:46 -0700 Subject: [PATCH] feat: Migrate API client to Microgenerator (#54) * Add samples for Data Catalog lookup_entry [(#2148)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2148) * Add samples for Data Catalog lookup_entry * Add tests for Data Catalog lookup_entry * Add samples for lookup_entry by SQL Resource * Add README.rst * Improve command line interface * Removed the "lookup-" prefix from commands * Handle the --sql-resource optional argument by subparsers * Refer to GCP public assets in tests * Add region tags to support Data Catalog docs [(#2169)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2169) * Adds updates including compute [(#2436)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2436) * Adds updates including compute * Python 2 compat pytest * Fixing weird \r\n issue from GH merge * Put asset tests back in * Re-add pod operator test * Hack parameter for k8s pod operator * Auto-update dependencies. [(#2005)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2005) * Auto-update dependencies. * Revert update of appengine/flexible/datastore. 
* revert update of appengine/flexible/scipy * revert update of bigquery/bqml * revert update of bigquery/cloud-client * revert update of bigquery/datalab-migration * revert update of bigtable/quickstart * revert update of compute/api * revert update of container_registry/container_analysis * revert update of dataflow/run_template * revert update of datastore/cloud-ndb * revert update of dialogflow/cloud-client * revert update of dlp * revert update of functions/imagemagick * revert update of functions/ocr/app * revert update of healthcare/api-client/fhir * revert update of iam/api-client * revert update of iot/api-client/gcs_file_to_device * revert update of iot/api-client/mqtt_example * revert update of language/automl * revert update of run/image-processing * revert update of vision/automl * revert update testing/requirements.txt * revert update of vision/cloud-client/detect * revert update of vision/cloud-client/product_search * revert update of jobs/v2/api_client * revert update of jobs/v3/api_client * revert update of opencensus * revert update of translate/cloud-client * revert update to speech/cloud-client Co-authored-by: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Co-authored-by: Doug Mahugh * chore(deps): update dependency google-cloud-datacatalog to v0.6.0 [(#3069)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3069) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [google-cloud-datacatalog](https://togithub.com/googleapis/python-datacatalog) | minor | `==0.5.0` -> `==0.6.0` | --- ### Release Notes
googleapis/python-datacatalog ### [`v0.6.0`](https://togithub.com/googleapis/python-datacatalog/blob/master/CHANGELOG.md#​060httpswwwgithubcomgoogleapispython-datacatalogcomparev050v060-2020-02-24) [Compare Source](https://togithub.com/googleapis/python-datacatalog/compare/v0.5.0...v0.6.0) ##### Features - **datacatalog:** add sample for create a fileset entry quickstart ([#​9977](https://www.github.com/googleapis/python-datacatalog/issues/9977)) ([16eaf4b](https://www.github.com/googleapis/python-datacatalog/commit/16eaf4b16cdc0ce7361afb1d8dac666cea2a9db0)) - **datacatalog:** undeprecate resource name helper methods, bump copyright year to 2020, tweak docstring formatting (via synth) ([#​10228](https://www.github.com/googleapis/python-datacatalog/issues/10228)) ([84e5e7c](https://www.github.com/googleapis/python-datacatalog/commit/84e5e7c340fa189ce4cffca4fdee82cc7ded9f70)) - add `list_entry_groups`, `list_entries`, `update_entry_group` methods to v1beta1 (via synth) ([#​6](https://www.github.com/googleapis/python-datacatalog/issues/6)) ([b51902e](https://www.github.com/googleapis/python-datacatalog/commit/b51902e26d590f52c9412756a178265850b7d516)) ##### Bug Fixes - **datacatalog:** deprecate resource name helper methods (via synth) ([#​9831](https://www.github.com/googleapis/python-datacatalog/issues/9831)) ([22db3f0](https://www.github.com/googleapis/python-datacatalog/commit/22db3f0683b8aca544cd96c0063dcc8157ad7335))
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#GoogleCloudPlatform/python-docs-samples). * Simplify noxfile setup. [(#2806)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2806) * chore(deps): update dependency requests to v2.23.0 * Simplify noxfile and add version control. * Configure appengine/standard to only test Python 2.7. * Update Kokoro configs to match noxfile. * Add requirements-test to each folder. * Remove Py2 versions from everything except appengine/standard. * Remove conftest.py. * Remove appengine/standard/conftest.py * Remove 'no-sucess-flaky-report' from pytest.ini. * Add GAE SDK back to appengine/standard tests. * Fix typo. * Roll pytest to python 2 version. * Add a bunch of testing requirements. * Remove typo. * Add appengine lib directory back in. * Add some additional requirements. * Fix issue with flake8 args. * Even more requirements. * Readd appengine conftest.py. * Add a few more requirements. * Even more Appengine requirements. * Add webtest for appengine/standard/mailgun. * Add some additional requirements. * Add workaround for issue with mailjet-rest. * Add responses for appengine/standard/mailjet. 
Co-authored-by: Renovate Bot * Update dependency google-cloud-datacatalog to v0.7.0 [(#3320)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3320) Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> * Update Data Catalog samples to V1 [(#3382)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3382) Co-authored-by: Takashi Matsuo * chore(deps): update dependency google-cloud-datacatalog to v0.8.0 [(#3850)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3850) * Update dependency google-cloud-datacatalog to v1 [(#4115)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4115) * chore(deps): update dependency pytest to v5.4.3 [(#4279)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4279) * chore(deps): update dependency pytest to v5.4.3 * specify pytest for python 2 in appengine Co-authored-by: Leah Cole * Update dependency pytest to v6 [(#4390)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4390) * chore: update templates * chore: update templates * feat: Migrate to use Microgenerator * feat: Migrate to use Microgenerator * feat: Migrate to use Microgenerator * Migrate API to microgenerator * Migrate API to microgenerator * Samples tests * fix samples tests * fix lint errors and test coverage metrics * docs update * fix docs * fix docs * fix docs * remove .python-version file Co-authored-by: Ricardo Mendes <50331050+ricardosm-cit@users.noreply.github.com> Co-authored-by: Gus Class Co-authored-by: DPEBot Co-authored-by: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Co-authored-by: Doug Mahugh Co-authored-by: WhiteSource Renovate Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Co-authored-by: Marcelo Costa Co-authored-by: Takashi Matsuo Co-authored-by: Leah Cole --- .../create_fileset_entry_quickstart.py | 25 ++++++++----------- datacatalog/snippets/README.rst | 24 ++++-------------- 
datacatalog/snippets/lookup_entry.py | 12 ++++----- datacatalog/tests/conftest.py | 13 ++++------ datacatalog/tests/test_create_entry_group.py | 2 +- datacatalog/v1beta1/create_entry_group.py | 5 ++-- datacatalog/v1beta1/create_fileset_entry.py | 2 +- datacatalog/v1beta1/datacatalog_get_entry.py | 6 ++--- .../v1beta1/datacatalog_lookup_entry.py | 5 ++-- .../datacatalog_lookup_entry_sql_resource.py | 5 ++-- datacatalog/v1beta1/datacatalog_search.py | 2 +- 11 files changed, 38 insertions(+), 63 deletions(-) diff --git a/datacatalog/quickstart/create_fileset_entry_quickstart.py b/datacatalog/quickstart/create_fileset_entry_quickstart.py index 55b0af59e689..5e1c99f0f3d0 100644 --- a/datacatalog/quickstart/create_fileset_entry_quickstart.py +++ b/datacatalog/quickstart/create_fileset_entry_quickstart.py @@ -40,7 +40,7 @@ def create_fileset_entry_quickstart(client, project_id, entry_group_id, entry_id # Create an Entry Group. # Construct a full Entry Group object to send to the API. - entry_group_obj = datacatalog_v1beta1.types.EntryGroup() + entry_group_obj = datacatalog_v1beta1.EntryGroup() entry_group_obj.display_name = "My Fileset Entry Group" entry_group_obj.description = "This Entry Group consists of ...." @@ -48,26 +48,23 @@ def create_fileset_entry_quickstart(client, project_id, entry_group_id, entry_id # Raises google.api_core.exceptions.AlreadyExists if the Entry Group # already exists within the project. entry_group = client.create_entry_group( - parent=datacatalog_v1beta1.DataCatalogClient.location_path( + request = {'parent': datacatalog_v1beta1.DataCatalogClient.location_path( project_id, location_id - ), - entry_group_id=entry_group_id, - entry_group=entry_group_obj, - ) + ), 'entry_group_id': entry_group_id, 'entry_group': entry_group_obj}) print("Created entry group {}".format(entry_group.name)) # Create a Fileset Entry. # Construct a full Entry object to send to the API. 
- entry = datacatalog_v1beta1.types.Entry() + entry = datacatalog_v1beta1.Entry() entry.display_name = "My Fileset" entry.description = "This Fileset consists of ..." entry.gcs_fileset_spec.file_patterns.append("gs://cloud-samples-data/*") - entry.type = datacatalog_v1beta1.enums.EntryType.FILESET + entry.type = datacatalog_v1beta1.EntryType.FILESET # Create the Schema, for example when you have a csv file. columns = [] columns.append( - datacatalog_v1beta1.types.ColumnSchema( + datacatalog_v1beta1.ColumnSchema( column="first_name", description="First name", mode="REQUIRED", @@ -76,7 +73,7 @@ def create_fileset_entry_quickstart(client, project_id, entry_group_id, entry_id ) columns.append( - datacatalog_v1beta1.types.ColumnSchema( + datacatalog_v1beta1.ColumnSchema( column="last_name", description="Last name", mode="REQUIRED", type="STRING" ) ) @@ -84,19 +81,19 @@ def create_fileset_entry_quickstart(client, project_id, entry_group_id, entry_id # Create sub columns for the addresses parent column subcolumns = [] subcolumns.append( - datacatalog_v1beta1.types.ColumnSchema( + datacatalog_v1beta1.ColumnSchema( column="city", description="City", mode="NULLABLE", type="STRING" ) ) subcolumns.append( - datacatalog_v1beta1.types.ColumnSchema( + datacatalog_v1beta1.ColumnSchema( column="state", description="State", mode="NULLABLE", type="STRING" ) ) columns.append( - datacatalog_v1beta1.types.ColumnSchema( + datacatalog_v1beta1.ColumnSchema( column="addresses", description="Addresses", mode="REPEATED", @@ -110,6 +107,6 @@ def create_fileset_entry_quickstart(client, project_id, entry_group_id, entry_id # Send the entry to the API for creation. # Raises google.api_core.exceptions.AlreadyExists if the Entry already # exists within the project. 
- entry = client.create_entry(entry_group.name, entry_id, entry) + entry = client.create_entry(request = {'parent': entry_group.name, 'entry_id': entry_id, 'entry': entry}) print("Created entry {}".format(entry.name)) # [END datacatalog_create_fileset_quickstart_tag] diff --git a/datacatalog/snippets/README.rst b/datacatalog/snippets/README.rst index 3476cceaf360..343431d91532 100644 --- a/datacatalog/snippets/README.rst +++ b/datacatalog/snippets/README.rst @@ -1,4 +1,3 @@ - .. This file is automatically generated. Do not edit this file directly. Google Cloud Data Catalog Python Samples @@ -16,11 +15,13 @@ This directory contains samples for Google Cloud Data Catalog. `Google Cloud Dat .. _Google Cloud Data Catalog: https://cloud.google.com/data-catalog/docs + + + Setup ------------------------------------------------------------------------------- - Authentication ++++++++++++++ @@ -31,9 +32,6 @@ credentials for applications. .. _Authentication Getting Started Guide: https://cloud.google.com/docs/authentication/getting-started - - - Install Dependencies ++++++++++++++++++++ @@ -48,7 +46,7 @@ Install Dependencies .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup -#. Create a virtualenv. Samples are compatible with Python 3.6+. +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. .. code-block:: bash @@ -64,15 +62,9 @@ Install Dependencies .. _pip: https://pip.pypa.io/ .. _virtualenv: https://virtualenv.pypa.io/ - - - - - Samples ------------------------------------------------------------------------------- - Lookup entry +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -88,7 +80,6 @@ To run this sample: $ python lookup_entry.py - usage: lookup_entry.py [-h] project_id {bigquery-dataset,bigquery-table,pubsub-topic} ... 
@@ -116,10 +107,6 @@ To run this sample: - - - - The client library ------------------------------------------------------------------------------- @@ -135,5 +122,4 @@ to `browse the source`_ and `report issues`_. https://github.com/GoogleCloudPlatform/google-cloud-python/issues - -.. _Google Cloud SDK: https://cloud.google.com/sdk/ +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/datacatalog/snippets/lookup_entry.py b/datacatalog/snippets/lookup_entry.py index 4b6e8c5873fb..656cb97e6452 100644 --- a/datacatalog/snippets/lookup_entry.py +++ b/datacatalog/snippets/lookup_entry.py @@ -34,7 +34,7 @@ def lookup_bigquery_dataset(project_id, dataset_id): resource_name = '//bigquery.googleapis.com/projects/{}/datasets/{}'\ .format(project_id, dataset_id) - return datacatalog.lookup_entry(linked_resource=resource_name) + return datacatalog.lookup_entry(request={'linked_resource': resource_name}) # [END datacatalog_lookup_dataset] @@ -48,7 +48,7 @@ def lookup_bigquery_dataset_sql_resource(project_id, dataset_id): sql_resource = 'bigquery.dataset.`{}`.`{}`'.format(project_id, dataset_id) - return datacatalog.lookup_entry(sql_resource=sql_resource) + return datacatalog.lookup_entry(request={'sql_resource': sql_resource}) def lookup_bigquery_table(project_id, dataset_id, table_id): @@ -61,7 +61,7 @@ def lookup_bigquery_table(project_id, dataset_id, table_id): '/tables/{}'\ .format(project_id, dataset_id, table_id) - return datacatalog.lookup_entry(linked_resource=resource_name) + return datacatalog.lookup_entry(request={'linked_resource': resource_name}) def lookup_bigquery_table_sql_resource(project_id, dataset_id, table_id): @@ -75,7 +75,7 @@ def lookup_bigquery_table_sql_resource(project_id, dataset_id, table_id): sql_resource = 'bigquery.table.`{}`.`{}`.`{}`'.format( project_id, dataset_id, table_id) - return datacatalog.lookup_entry(sql_resource=sql_resource) + return datacatalog.lookup_entry(request={'sql_resource': 
sql_resource}) def lookup_pubsub_topic(project_id, topic_id): @@ -87,7 +87,7 @@ def lookup_pubsub_topic(project_id, topic_id): resource_name = '//pubsub.googleapis.com/projects/{}/topics/{}'\ .format(project_id, topic_id) - return datacatalog.lookup_entry(linked_resource=resource_name) + return datacatalog.lookup_entry(request={'linked_resource': resource_name}) def lookup_pubsub_topic_sql_resource(project_id, topic_id): @@ -100,7 +100,7 @@ def lookup_pubsub_topic_sql_resource(project_id, topic_id): sql_resource = 'pubsub.topic.`{}`.`{}`'.format(project_id, topic_id) - return datacatalog.lookup_entry(sql_resource=sql_resource) + return datacatalog.lookup_entry(request={'sql_resource': sql_resource}) if __name__ == '__main__': diff --git a/datacatalog/tests/conftest.py b/datacatalog/tests/conftest.py index 75e6753ff446..6ee1fcb621ea 100644 --- a/datacatalog/tests/conftest.py +++ b/datacatalog/tests/conftest.py @@ -52,7 +52,7 @@ def random_entry_id(client, project_id, random_entry_group_id): entry_name = datacatalog_v1beta1.DataCatalogClient.entry_path( project_id, "us-central1", random_entry_group_id, random_entry_id ) - client.delete_entry(entry_name) + client.delete_entry(request = {'name': entry_name}) @pytest.fixture @@ -65,7 +65,7 @@ def random_entry_group_id(client, project_id): entry_group_name = datacatalog_v1beta1.DataCatalogClient.entry_group_path( project_id, "us-central1", random_entry_group_id ) - client.delete_entry_group(entry_group_name) + client.delete_entry_group(request = {'name': entry_group_name}) @pytest.fixture @@ -76,7 +76,7 @@ def random_entry_name(client, entry_group_name): ) random_entry_name = "{}/entries/{}".format(entry_group_name, random_entry_id) yield random_entry_name - client.delete_entry(random_entry_name) + client.delete_entry(request = {'name': random_entry_name}) @pytest.fixture @@ -86,9 +86,6 @@ def entry_group_name(client, project_id): now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] ) entry_group = 
client.create_entry_group( - datacatalog_v1beta1.DataCatalogClient.location_path(project_id, "us-central1"), - entry_group_id, - {}, - ) + request = {'parent': datacatalog_v1beta1.DataCatalogClient.location_path(project_id, "us-central1"), 'entry_group_id': entry_group_id, 'entry_group': {}}) yield entry_group.name - client.delete_entry_group(entry_group.name) + client.delete_entry_group(request = {'name': entry_group.name}) diff --git a/datacatalog/tests/test_create_entry_group.py b/datacatalog/tests/test_create_entry_group.py index 9c8c33b8cd64..443c97f92921 100644 --- a/datacatalog/tests/test_create_entry_group.py +++ b/datacatalog/tests/test_create_entry_group.py @@ -18,7 +18,7 @@ def test_create_entry_group(capsys, client, project_id, random_entry_group_id): - create_entry_group.create_entry_group(client, project_id, random_entry_group_id) + create_entry_group.create_entry_group(request = {'parent': client, 'entry_group_id': project_id, 'entry_group': random_entry_group_id}) out, err = capsys.readouterr() assert ( "Created entry group" diff --git a/datacatalog/v1beta1/create_entry_group.py b/datacatalog/v1beta1/create_entry_group.py index 24a856d8739c..d2056ec63d2c 100644 --- a/datacatalog/v1beta1/create_entry_group.py +++ b/datacatalog/v1beta1/create_entry_group.py @@ -40,7 +40,7 @@ def create_entry_group(client, project_id, entry_group_id): ) # Construct a full EntryGroup object to send to the API. - entry_group = datacatalog_v1beta1.types.EntryGroup() + entry_group = datacatalog_v1beta1.EntryGroup() entry_group.display_name = "My Entry Group" entry_group.description = "This Entry Group consists of ..." @@ -48,7 +48,6 @@ def create_entry_group(client, project_id, entry_group_id): # Raises google.api_core.exceptions.AlreadyExists if the Entry Group # already exists within the project. entry_group = client.create_entry_group( - parent, entry_group_id, entry_group - ) # Make an API request. 
+ request = {'parent': parent, 'entry_group_id': entry_group_id, 'entry_group': entry_group}) # Make an API request. print("Created entry group {}".format(entry_group.name)) # [END datacatalog_create_entry_group_tag] diff --git a/datacatalog/v1beta1/create_fileset_entry.py b/datacatalog/v1beta1/create_fileset_entry.py index 6cc275655988..f96255b2bcd8 100644 --- a/datacatalog/v1beta1/create_fileset_entry.py +++ b/datacatalog/v1beta1/create_fileset_entry.py @@ -81,6 +81,6 @@ def create_fileset_entry(client, entry_group_name, entry_id): # Send the entry to the API for creation. # Raises google.api_core.exceptions.AlreadyExists if the Entry already # exists within the project. - entry = client.create_entry(entry_group_name, entry_id, entry) + entry = client.create_entry(request = {'parent': entry_group_name, 'entry_id': entry_id, 'entry': entry}) print("Created entry {}".format(entry.name)) # [END datacatalog_create_fileset_tag] diff --git a/datacatalog/v1beta1/datacatalog_get_entry.py b/datacatalog/v1beta1/datacatalog_get_entry.py index fcd8b2096c7e..05bc0dd52aa3 100644 --- a/datacatalog/v1beta1/datacatalog_get_entry.py +++ b/datacatalog/v1beta1/datacatalog_get_entry.py @@ -26,8 +26,6 @@ # [START datacatalog_get_entry] from google.cloud import datacatalog_v1beta1 -from google.cloud.datacatalog_v1beta1 import enums - def sample_get_entry(project_id, location_id, entry_group_id, entry_id): """ @@ -48,10 +46,10 @@ def sample_get_entry(project_id, location_id, entry_group_id, entry_id): # entry_id = '[Entry ID]' name = client.entry_path(project_id, location_id, entry_group_id, entry_id) - response = client.get_entry(name) + response = client.get_entry(request = {'name': name}) entry = response print(u"Entry name: {}".format(entry.name)) - print(u"Entry type: {}".format(enums.EntryType(entry.type).name)) + print(u"Entry type: {}".format(datacatalog_v1beta1.EntryType(entry.type).name)) print(u"Linked resource: {}".format(entry.linked_resource)) diff --git 
a/datacatalog/v1beta1/datacatalog_lookup_entry.py b/datacatalog/v1beta1/datacatalog_lookup_entry.py index 7920df16bf2f..176d080db766 100644 --- a/datacatalog/v1beta1/datacatalog_lookup_entry.py +++ b/datacatalog/v1beta1/datacatalog_lookup_entry.py @@ -26,7 +26,6 @@ # [START datacatalog_lookup_entry] from google.cloud import datacatalog_v1beta1 -from google.cloud.datacatalog_v1beta1 import enums def sample_lookup_entry(resource_name): @@ -45,10 +44,10 @@ def sample_lookup_entry(resource_name): client = datacatalog_v1beta1.DataCatalogClient() # resource_name = '[Full Resource Name]' - response = client.lookup_entry(linked_resource=resource_name) + response = client.lookup_entry(request = {'linked_resource': resource_name}) entry = response print(u"Entry name: {}".format(entry.name)) - print(u"Entry type: {}".format(enums.EntryType(entry.type).name)) + print(u"Entry type: {}".format(datacatalog_v1beta1.EntryType(entry.type).name)) print(u"Linked resource: {}".format(entry.linked_resource)) diff --git a/datacatalog/v1beta1/datacatalog_lookup_entry_sql_resource.py b/datacatalog/v1beta1/datacatalog_lookup_entry_sql_resource.py index 9656759ef4bd..f46af3698080 100644 --- a/datacatalog/v1beta1/datacatalog_lookup_entry_sql_resource.py +++ b/datacatalog/v1beta1/datacatalog_lookup_entry_sql_resource.py @@ -26,7 +26,6 @@ # [START datacatalog_lookup_entry_sql_resource] from google.cloud import datacatalog_v1beta1 -from google.cloud.datacatalog_v1beta1 import enums def sample_lookup_entry(sql_name): @@ -44,10 +43,10 @@ def sample_lookup_entry(sql_name): client = datacatalog_v1beta1.DataCatalogClient() # sql_name = '[SQL Resource Name]' - response = client.lookup_entry(sql_resource=sql_name) + response = client.lookup_entry(request = {'sql_resource': sql_name}) entry = response print(u"Entry name: {}".format(entry.name)) - print(u"Entry type: {}".format(enums.EntryType(entry.type).name)) + print(u"Entry type: {}".format(datacatalog_v1beta1.EntryType(entry.type).name)) 
print(u"Linked resource: {}".format(entry.linked_resource)) diff --git a/datacatalog/v1beta1/datacatalog_search.py b/datacatalog/v1beta1/datacatalog_search.py index c4c1798c1cc9..ad10276698a4 100644 --- a/datacatalog/v1beta1/datacatalog_search.py +++ b/datacatalog/v1beta1/datacatalog_search.py @@ -54,7 +54,7 @@ def sample_search_catalog(include_project_id, include_gcp_public_datasets, query } # Iterate over all results - for response_item in client.search_catalog(scope, query): + for response_item in client.search_catalog(request = {'scope': scope, 'query': query}): print( u"Result type: {}".format( enums.SearchResultType(response_item.search_result_type).name